## P1 数据库基础 - zqyy_app: 创建 auth/biz schema、FDW 连接 etl_feiqiu - etl_feiqiu: 创建 app schema RLS 视图、商品库存预警表 - 清理 assistant_abolish 残留数据 ## P2 ETL/DWS 扩展 - 新增 DWS 助教订单贡献度表 (dws.assistant_order_contribution) - 新增 assistant_order_contribution_task 任务及 RLS 视图 - member_consumption 增加充值字段、assistant_daily 增加处罚字段 - 更新 ODS/DWD/DWS 任务文档及业务规则文档 - 更新 consistency_checker、flow_runner、task_registry 等核心模块 ## P3 小程序鉴权系统 - 新增 xcx_auth 路由/schema(微信登录 + JWT) - 新增 wechat/role/matching/application 服务层 - zqyy_app 鉴权表迁移 + 角色权限种子数据 - auth/dependencies.py 支持小程序 JWT 鉴权 ## 文档与审计 - 新增 DOCUMENTATION-MAP 文档导航 - 新增 7 份 BD_Manual 数据库变更文档 - 更新 DDL 基线快照(etl_feiqiu 6 schema + zqyy_app auth) - 新增全栈集成审计记录、部署检查清单更新 - 新增 BACKLOG 路线图、FDW→Core 迁移计划 ## Kiro 工程化 - 新增 5 个 Spec(P1/P2/P3/全栈集成/核心业务) - 新增审计自动化脚本(agent_on_stop/build_audit_context/compliance_prescan) - 新增 6 个 Hook(合规检查/会话日志/提交审计等) - 新增 doc-map steering 文件 ## 运维与测试 - 新增 ops 脚本:迁移验证/API 健康检查/ETL 监控/集成报告 - 新增属性测试:test_dws_contribution / test_auth_system - 清理过期 export 报告文件 - 更新 .gitignore 排除规则
234 lines
6.4 KiB
Python
234 lines
6.4 KiB
Python
#!/usr/bin/env python3
|
||
"""prompt_on_submit — promptSubmit 合并 hook 脚本。
|
||
|
||
合并原 audit_flagger + prompt_audit_log 的功能:
|
||
1. git status → 风险判定 → 写 .kiro/.audit_state.json
|
||
2. 记录 prompt 日志 → docs/audit/prompt_logs/
|
||
3. 记录当前 git fingerprint 快照 → .kiro/.git_snapshot.json(供 agentStop 对比)
|
||
|
||
所有功能块用 try/except 隔离,单个失败不影响其他。
|
||
"""
|
||
|
||
import hashlib
|
||
import json
|
||
import os
|
||
import re
|
||
import subprocess
|
||
import sys
|
||
from datetime import datetime, timezone, timedelta
|
||
|
||
# Asia/Taipei timezone (UTC+8); every timestamp this script records uses it.
TZ_TAIPEI = timezone(timedelta(hours=8))

# ── Risk rules (from audit_flagger): changed-path prefix → risk label ──
# Any match marks the session as requiring an audit.
RISK_RULES = [
    (re.compile(r"^apps/etl/connectors/feiqiu/(api|cli|config|database|loaders|models|orchestration|scd|tasks|utils|quality)/"), "etl"),
    (re.compile(r"^apps/backend/app/"), "backend"),
    (re.compile(r"^apps/admin-web/src/"), "admin-web"),
    (re.compile(r"^apps/miniprogram/(miniapp|miniprogram)/"), "miniprogram"),
    (re.compile(r"^packages/shared/"), "shared"),
    (re.compile(r"^db/"), "db"),
]

# Paths ignored entirely when deciding whether an audit is required
# (audit artifacts, Kiro state, scratch and test-cache directories).
NOISE_PATTERNS = [
    re.compile(r"^docs/audit/"),
    re.compile(r"^\.kiro/"),
    re.compile(r"^tmp/"),
    re.compile(r"^\.hypothesis/"),
]

# Paths that indicate a database schema change; any match additionally
# flags that the DB change documentation must be updated.
DB_PATTERNS = [
    re.compile(r"^db/"),
    re.compile(r"/migrations/"),
    re.compile(r"\.sql$"),
    re.compile(r"\.prisma$"),
]

# Audit flags derived from the working tree (read by the agentStop hook).
STATE_PATH = os.path.join(".kiro", ".audit_state.json")
# Git working-tree fingerprint snapshot (agentStop compares against it).
SNAPSHOT_PATH = os.path.join(".kiro", ".git_snapshot.json")
# Id of the most recently logged prompt, for correlating audit records.
PROMPT_ID_PATH = os.path.join(".kiro", ".last_prompt_id.json")
|
||
|
||
|
||
def now_taipei():
    """Return the current wall-clock time as an aware datetime in UTC+8 (Taipei)."""
    return datetime.now(tz=TZ_TAIPEI)
|
||
|
||
|
||
def sha1hex(s: str) -> str:
    """Return the hex-encoded SHA-1 digest of *s* (UTF-8 encoded)."""
    hasher = hashlib.sha1()
    hasher.update(s.encode("utf-8"))
    return hasher.hexdigest()
|
||
|
||
|
||
def get_changed_files() -> list[str]:
    """List dirty paths via `git status --porcelain`.

    Renames keep only the new path; surrounding quotes are stripped and
    backslashes normalized to forward slashes. Returns [] on any git failure.
    """
    try:
        proc = subprocess.run(
            ["git", "status", "--porcelain"],
            capture_output=True, text=True, encoding="utf-8", errors="replace", timeout=10,
        )
    except Exception:
        return []
    if proc.returncode != 0:
        return []

    paths = []
    for raw in proc.stdout.splitlines():
        # Porcelain lines are "XY <path>"; anything shorter is noise.
        if len(raw) < 4:
            continue
        candidate = raw[3:].strip()
        if " -> " in candidate:
            # Rename entry: keep the destination path only.
            candidate = candidate.split(" -> ")[-1]
        candidate = candidate.strip().strip('"').replace("\\", "/")
        if candidate:
            paths.append(candidate)
    return paths
|
||
|
||
|
||
def is_noise(f: str) -> bool:
    """Return True when path *f* matches any noise pattern (excluded from audit flagging)."""
    for pattern in NOISE_PATTERNS:
        if pattern.search(f):
            return True
    return False
|
||
|
||
|
||
def safe_read_json(path) -> dict:
    """Load a JSON object from *path*, returning {} on any failure.

    Failure covers: missing file, unreadable file, invalid JSON, and —
    unlike the previous version — a top-level value that is not a dict.
    Callers immediately call ``.get(...)`` on the result, so returning a
    list/str/number here would crash them.
    """
    if not os.path.isfile(path):
        return {}
    try:
        with open(path, "r", encoding="utf-8") as f:
            data = json.load(f)
    except Exception:
        return {}
    # Guarantee the documented contract: always a dict.
    return data if isinstance(data, dict) else {}
|
||
|
||
|
||
def write_json(path, data) -> None:
    """Write *data* as pretty-printed UTF-8 JSON to *path*, creating parent dirs.

    Fix: the old fallback ``os.path.dirname(path) or ".kiro"`` created an
    unrelated ``.kiro`` directory whenever *path* had no parent component,
    while the file itself still landed in the current directory. Now we
    only create directories when the path actually has a parent.
    """
    parent = os.path.dirname(path)
    if parent:
        os.makedirs(parent, exist_ok=True)
    with open(path, "w", encoding="utf-8") as f:
        json.dump(data, f, indent=2, ensure_ascii=False)
|
||
|
||
|
||
# ── Feature block 1: risk flagging (from audit_flagger) ──
def do_audit_flag(all_files, now):
    """Classify the dirty working tree and persist audit flags to STATE_PATH.

    Writes: audit_required / db_docs_required booleans, ordered de-duplicated
    reasons, up to 50 changed files, and a fingerprint of the change set.
    ``last_reminded_at`` is carried over only when the fingerprint is unchanged.
    """
    files = sorted({f for f in all_files if not is_noise(f)})

    if not files:
        # Nothing audit-relevant changed: reset to a clean "no audit" state.
        write_json(STATE_PATH, {
            "audit_required": False,
            "db_docs_required": False,
            "reasons": [],
            "changed_files": [],
            "change_fingerprint": "",
            "marked_at": now.isoformat(),
            "last_reminded_at": None,
        })
        return

    audit_required = False
    db_docs_required = False
    reasons = []

    def note(reason):
        # Record each reason once, preserving first-seen order.
        if reason not in reasons:
            reasons.append(reason)

    for path in files:
        for pattern, label in RISK_RULES:
            if pattern.search(path):
                audit_required = True
                note(f"dir:{label}")
        if "/" not in path:
            # A file changed at the repo root is always audit-worthy.
            audit_required = True
            note("root-file")
        if any(p.search(path) for p in DB_PATTERNS):
            db_docs_required = True
            note("db-schema-change")

    fp = sha1hex("\n".join(files))

    # Keep the existing last_reminded_at only for an identical change set.
    previous = safe_read_json(STATE_PATH)
    last_reminded = (
        previous.get("last_reminded_at")
        if previous.get("change_fingerprint") == fp
        else None
    )

    write_json(STATE_PATH, {
        "audit_required": audit_required,
        "db_docs_required": db_docs_required,
        "reasons": reasons,
        "changed_files": files[:50],
        "change_fingerprint": fp,
        "marked_at": now.isoformat(),
        "last_reminded_at": last_reminded,
    })
|
||
|
||
|
||
# ── Feature block 2: prompt logging ──
def do_prompt_log(now):
    """Write the submitted prompt to a timestamped markdown log and record its id.

    Reads the prompt text from the USER_PROMPT environment variable; writes one
    file per submission under docs/audit/prompt_logs/ and stores the generated
    prompt id (plus timestamp) in PROMPT_ID_PATH for later correlation.
    """
    prompt_id = f"P{now.strftime('%Y%m%d-%H%M%S')}"
    prompt_raw = os.environ.get("USER_PROMPT", "")

    # Guard against pathological prompts: anything over 20k chars is cut
    # down to its first 5k with an explicit truncation marker.
    if len(prompt_raw) > 20000:
        prompt_raw = prompt_raw[:5000] + "\n[TRUNCATED: prompt too long]"

    # One-line summary: collapse whitespace, cap at 120 chars.
    summary = " ".join(prompt_raw.split()).strip()
    if len(summary) > 120:
        summary = summary[:120] + "…"
    if not summary:
        summary = "(empty prompt)"

    log_dir = os.path.join("docs", "audit", "prompt_logs")
    os.makedirs(log_dir, exist_ok=True)
    filename = f"prompt_log_{now.strftime('%Y%m%d_%H%M%S')}.md"
    entry = f"""- [{prompt_id}] {now.strftime('%Y-%m-%d %H:%M:%S %z')}
- summary: {summary}
- prompt:
```text
{prompt_raw}
```
"""
    with open(os.path.join(log_dir, filename), "w", encoding="utf-8") as f:
        f.write(entry)

    # Persist the id so the agentStop hook can link its audit to this prompt.
    write_json(PROMPT_ID_PATH, {"prompt_id": prompt_id, "at": now.isoformat()})
|
||
|
||
|
||
# ── Feature block 3: git snapshot (agentStop diffs it to spot non-Kiro changes) ──
def do_git_snapshot(all_files, now):
    """Persist a fingerprint and sample (up to 100) of the current dirty files."""
    ordered = sorted(all_files)
    write_json(SNAPSHOT_PATH, {
        "files": ordered[:100],
        "fingerprint": sha1hex("\n".join(ordered)) if ordered else "",
        "taken_at": now.isoformat(),
    })
|
||
|
||
|
||
def main():
    """Hook entry point: skip outside a git repo, then run each feature block in isolation."""
    # Bail out quietly when we are not inside a git work tree.
    try:
        probe = subprocess.run(
            ["git", "rev-parse", "--is-inside-work-tree"],
            capture_output=True, text=True, encoding="utf-8", errors="replace", timeout=5,
        )
    except Exception:
        return
    if probe.returncode != 0:
        return

    now = now_taipei()
    all_files = get_changed_files()

    # Each feature block is isolated: one failure must not stop the others.
    steps = (
        lambda: do_audit_flag(all_files, now),
        lambda: do_prompt_log(now),
        lambda: do_git_snapshot(all_files, now),
    )
    for step in steps:
        try:
            step()
        except Exception:
            pass
|
||
|
||
|
||
if __name__ == "__main__":
    # Hook scripts must never break the editor's submit flow, so even an
    # unexpected top-level failure is swallowed silently.
    try:
        main()
    except Exception:
        pass
|