chore: v1 整理 — 清理历史文件、DDL 合并、文档归档

- 清理 1155 个已删除的历史文件(废弃 prompt_logs、tmp、旧 ops 脚本)
- export/ 数据文件从 git 移除(已在 .gitignore)
- demo-miniprogram 从 tmp/ 移入 apps/,添加 CLAUDE.md 注解
- DDL 合并:完整 schema 定义填充到 db/*/schemas/(从 docs/database/ddl/ 复制)
- 39 个 v1 迁移脚本归档到 db/_archived/migrations_v1_merged/
- 4 个迁移变更类 BD_Manual 文档归档到 docs/database/_archived/
- .gitignore 补充 .vite/ 和 apps/*.zip
- settings.json 添加 effortLevel 默认配置
- scripts/ops/ 新增运维脚本入库

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
Neo
2026-04-06 00:39:27 +08:00
parent 6f8f12314f
commit 779b2f6d52
1340 changed files with 9124 additions and 132087 deletions

View File

@@ -22,12 +22,16 @@ from dotenv import load_dotenv
load_dotenv()
BAILIAN_API_KEY = os.environ.get("BAILIAN_API_KEY", "")
if not BAILIAN_API_KEY:
raise RuntimeError("BAILIAN_API_KEY 未设置,请检查 .env 文件")
MODEL_NAME = os.environ.get("BAILIAN_MODEL", "qwen-plus")
BASE_URL = os.environ.get("BAILIAN_BASE_URL",
"https://dashscope.aliyuncs.com/compatible-mode/v1")
import dashscope
from dashscope import Application
DASHSCOPE_API_KEY = os.environ.get("DASHSCOPE_API_KEY", "")
if not DASHSCOPE_API_KEY:
raise RuntimeError("DASHSCOPE_API_KEY 未设置,请检查 .env 文件")
SUMMARY_APP_ID = os.environ.get("DASHSCOPE_APP_ID_SUMMARY", "")
if not SUMMARY_APP_ID:
raise RuntimeError("DASHSCOPE_APP_ID_SUMMARY 未设置,请检查 .env 文件")
dashscope.api_key = DASHSCOPE_API_KEY
# 导入索引管理函数
sys.path.insert(0, os.path.join(os.path.dirname(__file__)))
@@ -129,25 +133,25 @@ def load_md_content(eid: str, entry: dict) -> str | None:
async def generate_one(
client,
content: str,
semaphore: asyncio.Semaphore,
max_retries: int = 3,
) -> str:
"""调用百炼 API 生成摘要,带限流和指数退避。"""
"""调用百炼 Application API 生成摘要,带限流和指数退避。"""
# CHANGE 2026-03-22 | openai SDK → dashscope Application API(P14 迁移收尾)
prompt = f"请为以下单轮执行记录生成摘要:\n\n{content}"
async with semaphore:
for attempt in range(max_retries):
try:
resp = await client.chat.completions.create(
model=MODEL_NAME,
messages=[
{"role": "system", "content": SYSTEM_PROMPT},
{"role": "user",
"content": f"请为以下单轮执行记录生成摘要:\n\n{content}"},
],
max_tokens=4096,
resp = await asyncio.to_thread(
Application.call,
app_id=SUMMARY_APP_ID,
prompt=prompt,
)
return resp.choices[0].message.content.strip()
if resp.status_code == 200:
return (resp.output.get("text", "") or "").strip()
else:
raise RuntimeError(f"Application.call 失败: {resp.message}")
except Exception as e:
if attempt < max_retries - 1:
wait = 2 ** attempt
@@ -159,7 +163,6 @@ async def generate_one(
async def process_target(
client,
eid: str,
entry: dict,
semaphore: asyncio.Semaphore,
@@ -168,15 +171,13 @@ async def process_target(
content = load_md_content(eid, entry)
if not content:
return (eid, "")
desc = await generate_one(client, content, semaphore)
desc = await generate_one(content, semaphore)
return (eid, desc)
async def main():
import argparse
from openai import AsyncOpenAI
parser = argparse.ArgumentParser(description="批量生成 session 摘要")
parser.add_argument("--limit", type=int, default=0,
help="只处理前 N 条(0=全量)")
@@ -212,7 +213,6 @@ async def main():
print(f" ... 还有 {len(targets) - 20}")
return
client = AsyncOpenAI(api_key=BAILIAN_API_KEY, base_url=BASE_URL)
semaphore = asyncio.Semaphore(args.concurrency)
t0 = time.time()
@@ -227,7 +227,7 @@ async def main():
total_batches = (len(targets) + batch_size - 1) // batch_size
print(f"\n📦 批次 {batch_num}/{total_batches}{len(batch)} 条)...")
tasks = [process_target(client, eid, ent, semaphore)
tasks = [process_target(eid, ent, semaphore)
for eid, ent in batch]
results = await asyncio.gather(*tasks)