feat: P1-P3 全栈集成 — 数据库基础 + DWS 扩展 + 小程序鉴权 + 工程化体系
## P1 数据库基础 - zqyy_app: 创建 auth/biz schema、FDW 连接 etl_feiqiu - etl_feiqiu: 创建 app schema RLS 视图、商品库存预警表 - 清理 assistant_abolish 残留数据 ## P2 ETL/DWS 扩展 - 新增 DWS 助教订单贡献度表 (dws.assistant_order_contribution) - 新增 assistant_order_contribution_task 任务及 RLS 视图 - member_consumption 增加充值字段、assistant_daily 增加处罚字段 - 更新 ODS/DWD/DWS 任务文档及业务规则文档 - 更新 consistency_checker、flow_runner、task_registry 等核心模块 ## P3 小程序鉴权系统 - 新增 xcx_auth 路由/schema(微信登录 + JWT) - 新增 wechat/role/matching/application 服务层 - zqyy_app 鉴权表迁移 + 角色权限种子数据 - auth/dependencies.py 支持小程序 JWT 鉴权 ## 文档与审计 - 新增 DOCUMENTATION-MAP 文档导航 - 新增 7 份 BD_Manual 数据库变更文档 - 更新 DDL 基线快照(etl_feiqiu 6 schema + zqyy_app auth) - 新增全栈集成审计记录、部署检查清单更新 - 新增 BACKLOG 路线图、FDW→Core 迁移计划 ## Kiro 工程化 - 新增 5 个 Spec(P1/P2/P3/全栈集成/核心业务) - 新增审计自动化脚本(agent_on_stop/build_audit_context/compliance_prescan) - 新增 6 个 Hook(合规检查/会话日志/提交审计等) - 新增 doc-map steering 文件 ## 运维与测试 - 新增 ops 脚本:迁移验证/API 健康检查/ETL 监控/集成报告 - 新增属性测试:test_dws_contribution / test_auth_system - 清理过期 export 报告文件 - 更新 .gitignore 排除规则
This commit is contained in:
31
.gitignore
vendored
31
.gitignore
vendored
@@ -8,28 +8,32 @@ __pycache__/
|
||||
.hypothesis/
|
||||
.pytest_cache/
|
||||
pytest-cache-files-*/
|
||||
# logs/
|
||||
logs/
|
||||
*.log
|
||||
*.jsonl
|
||||
|
||||
# ===== 审计文件 =====
|
||||
docs/audit/
|
||||
|
||||
# ===== 运行时产出 =====
|
||||
#export/
|
||||
#reports/
|
||||
# scripts/logs/
|
||||
export/
|
||||
reports/
|
||||
scripts/logs/
|
||||
|
||||
|
||||
# ===== 环境配置(保留模板) =====
|
||||
# .env
|
||||
# .env.local
|
||||
# !.env.template
|
||||
.env
|
||||
.env.local
|
||||
!.env.template
|
||||
|
||||
# ===== Node =====
|
||||
node_modules/
|
||||
|
||||
# ===== Python 虚拟环境 =====
|
||||
# .venv/
|
||||
# venv/
|
||||
# ENV/
|
||||
# env/
|
||||
.venv/
|
||||
venv/
|
||||
ENV/
|
||||
env/
|
||||
|
||||
# ===== Python 构建产物 =====
|
||||
.Python
|
||||
@@ -53,6 +57,9 @@ dist/
|
||||
.coverage
|
||||
htmlcov/
|
||||
|
||||
# ===== 测试 =====
|
||||
tests/
|
||||
|
||||
# ===== infra 敏感文件 =====
|
||||
infra/**/*.key
|
||||
infra/**/*.pem
|
||||
@@ -74,3 +81,5 @@ infra/**/*.secret
|
||||
# ===== Kiro 运行时状态 =====
|
||||
.kiro/.audit_state.json
|
||||
.kiro/.last_prompt_id.json
|
||||
|
||||
|
||||
|
||||
526
.kiro/.audit_context.json
Normal file
526
.kiro/.audit_context.json
Normal file
File diff suppressed because one or more lines are too long
74
.kiro/.compliance_state.json
Normal file
74
.kiro/.compliance_state.json
Normal file
@@ -0,0 +1,74 @@
|
||||
{
|
||||
"needs_check": true,
|
||||
"scanned_at": "2026-02-26T08:03:23.664569+08:00",
|
||||
"new_migration_sql": [
|
||||
"db/etl_feiqiu/migrations/2025-02-24__alter_assistant_daily_add_penalty_fields.sql",
|
||||
"db/etl_feiqiu/migrations/2025-02-24__alter_member_consumption_add_recharge_fields.sql",
|
||||
"db/etl_feiqiu/migrations/2025-02-24__create_dws_assistant_order_contribution.sql",
|
||||
"db/etl_feiqiu/migrations/2025-02-24__create_rls_view_assistant_order_contribution.sql",
|
||||
"db/etl_feiqiu/migrations/2026-02-24__add_goods_stock_warning_info.sql",
|
||||
"db/etl_feiqiu/migrations/2026-02-24__cleanup_assistant_abolish_residual.sql",
|
||||
"db/etl_feiqiu/migrations/2026-02-24__p1_create_app_schema_rls_views.sql",
|
||||
"db/zqyy_app/migrations/2026-02-24__p1_create_auth_biz_schemas.sql",
|
||||
"db/zqyy_app/migrations/2026-02-24__p1_setup_fdw_etl.sql",
|
||||
"db/zqyy_app/migrations/2026-02-25__p3_create_auth_tables.sql",
|
||||
"db/zqyy_app/migrations/2026-02-25__p3_seed_roles_permissions.sql"
|
||||
],
|
||||
"new_or_modified_sql": [
|
||||
"db/_archived/ddl_baseline_2026-02-22/db/etl_feiqiu/schemas/dwd.sql",
|
||||
"db/_archived/ddl_baseline_2026-02-22/db/etl_feiqiu/schemas/ods.sql",
|
||||
"db/_archived/ddl_baseline_2026-02-22/db/zqyy_app/migrations/2025-02-24__add_fdw_dws_extensions.sql",
|
||||
"db/etl_feiqiu/migrations/2025-02-24__alter_assistant_daily_add_penalty_fields.sql",
|
||||
"db/etl_feiqiu/migrations/2025-02-24__alter_member_consumption_add_recharge_fields.sql",
|
||||
"db/etl_feiqiu/migrations/2025-02-24__create_dws_assistant_order_contribution.sql",
|
||||
"db/etl_feiqiu/migrations/2025-02-24__create_rls_view_assistant_order_contribution.sql",
|
||||
"db/etl_feiqiu/migrations/2026-02-24__add_goods_stock_warning_info.sql",
|
||||
"db/etl_feiqiu/migrations/2026-02-24__cleanup_assistant_abolish_residual.sql",
|
||||
"db/etl_feiqiu/migrations/2026-02-24__p1_create_app_schema_rls_views.sql",
|
||||
"db/etl_feiqiu/seeds/seed_ods_tasks.sql",
|
||||
"db/etl_feiqiu/seeds/seed_scheduler_tasks.sql",
|
||||
"db/zqyy_app/migrations/2026-02-24__p1_create_auth_biz_schemas.sql",
|
||||
"db/zqyy_app/migrations/2026-02-24__p1_setup_fdw_etl.sql",
|
||||
"db/zqyy_app/migrations/2026-02-25__p3_create_auth_tables.sql",
|
||||
"db/zqyy_app/migrations/2026-02-25__p3_seed_roles_permissions.sql",
|
||||
"docs/database/ddl/etl_feiqiu__app.sql",
|
||||
"docs/database/ddl/etl_feiqiu__core.sql",
|
||||
"docs/database/ddl/etl_feiqiu__dwd.sql",
|
||||
"docs/database/ddl/etl_feiqiu__dws.sql",
|
||||
"docs/database/ddl/etl_feiqiu__meta.sql",
|
||||
"docs/database/ddl/etl_feiqiu__ods.sql",
|
||||
"docs/database/ddl/fdw.sql",
|
||||
"docs/database/ddl/zqyy_app__auth.sql",
|
||||
"docs/database/ddl/zqyy_app__public.sql"
|
||||
],
|
||||
"code_without_docs": [
|
||||
{
|
||||
"file": "apps/etl/connectors/feiqiu/orchestration/flow_runner.py",
|
||||
"expected_docs": [
|
||||
"apps/etl/connectors/feiqiu/docs/architecture/"
|
||||
]
|
||||
},
|
||||
{
|
||||
"file": "apps/etl/connectors/feiqiu/orchestration/task_executor.py",
|
||||
"expected_docs": [
|
||||
"apps/etl/connectors/feiqiu/docs/architecture/"
|
||||
]
|
||||
},
|
||||
{
|
||||
"file": "apps/etl/connectors/feiqiu/orchestration/task_registry.py",
|
||||
"expected_docs": [
|
||||
"apps/etl/connectors/feiqiu/docs/architecture/"
|
||||
]
|
||||
},
|
||||
{
|
||||
"file": "apps/etl/connectors/feiqiu/orchestration/topological_sort.py",
|
||||
"expected_docs": [
|
||||
"apps/etl/connectors/feiqiu/docs/architecture/"
|
||||
]
|
||||
}
|
||||
],
|
||||
"new_files": [],
|
||||
"has_bd_manual": true,
|
||||
"has_audit_record": false,
|
||||
"has_ddl_baseline": true
|
||||
}
|
||||
106
.kiro/.git_snapshot.json
Normal file
106
.kiro/.git_snapshot.json
Normal file
@@ -0,0 +1,106 @@
|
||||
{
|
||||
"files": [
|
||||
".gitignore",
|
||||
".kiro/.audit_context.json",
|
||||
".kiro/.compliance_state.json",
|
||||
".kiro/.git_snapshot.json",
|
||||
".kiro/.last_prompt_id.json",
|
||||
".kiro/agents/audit-writer.md",
|
||||
".kiro/hooks/agent-on-stop.kiro.hook",
|
||||
".kiro/hooks/audit-flagger.kiro.hook",
|
||||
".kiro/hooks/audit-reminder.kiro.hook",
|
||||
".kiro/hooks/change-compliance.kiro.hook",
|
||||
".kiro/hooks/prompt-audit-log.kiro.hook",
|
||||
".kiro/hooks/prompt-on-submit.kiro.hook",
|
||||
".kiro/hooks/run-audit-writer.kiro.hook",
|
||||
".kiro/hooks/session-log.kiro.hook",
|
||||
".kiro/scripts/agent_on_stop.py",
|
||||
".kiro/scripts/build_audit_context.py",
|
||||
".kiro/scripts/change_compliance_prescan.py",
|
||||
".kiro/scripts/prompt_on_submit.py",
|
||||
".kiro/scripts/session_log.py",
|
||||
".kiro/specs/01-miniapp-db-foundation/.config.kiro",
|
||||
".kiro/specs/01-miniapp-db-foundation/design.md",
|
||||
".kiro/specs/01-miniapp-db-foundation/requirements.md",
|
||||
".kiro/specs/01-miniapp-db-foundation/tasks.md",
|
||||
".kiro/specs/02-etl-dws-miniapp-extensions/.config.kiro",
|
||||
".kiro/specs/02-etl-dws-miniapp-extensions/design.md",
|
||||
".kiro/specs/02-etl-dws-miniapp-extensions/requirements.md",
|
||||
".kiro/specs/02-etl-dws-miniapp-extensions/tasks.md",
|
||||
".kiro/specs/03-miniapp-auth-system/.config.kiro",
|
||||
".kiro/specs/03-miniapp-auth-system/design.md",
|
||||
".kiro/specs/03-miniapp-auth-system/requirements.md",
|
||||
".kiro/specs/03-miniapp-auth-system/tasks.md",
|
||||
".kiro/specs/[ETL]-fullstack-integration/.config.kiro",
|
||||
".kiro/specs/[ETL]-fullstack-integration/design.md",
|
||||
".kiro/specs/[ETL]-fullstack-integration/requirements.md",
|
||||
".kiro/specs/[ETL]-fullstack-integration/tasks.md",
|
||||
".kiro/specs/etl-fullstack-integration/design.md",
|
||||
".kiro/specs/etl-fullstack-integration/tasks.md",
|
||||
".kiro/specs/miniapp-core-business/.config.kiro",
|
||||
".kiro/specs/miniapp-core-business/requirements.md",
|
||||
".kiro/specs/spi-spending-power-index/tasks.md",
|
||||
".kiro/steering/doc-map.md",
|
||||
"README.md",
|
||||
"apps/admin-web/README.md",
|
||||
"apps/backend/app/auth/dependencies.py",
|
||||
"apps/backend/app/auth/jwt.py",
|
||||
"apps/backend/app/main.py",
|
||||
"apps/backend/app/routers/xcx_auth.py",
|
||||
"apps/backend/app/schemas/xcx_auth.py",
|
||||
"apps/backend/app/services/application.py",
|
||||
"apps/backend/app/services/matching.py",
|
||||
"apps/backend/app/services/role.py",
|
||||
"apps/backend/app/services/task_registry.py",
|
||||
"apps/backend/app/services/wechat.py",
|
||||
"apps/backend/auth_only.txt",
|
||||
"apps/backend/auth_only_results.txt",
|
||||
"apps/backend/auth_test_results.txt",
|
||||
"apps/backend/docs/API-REFERENCE.md",
|
||||
"apps/backend/test_results.txt",
|
||||
"apps/etl/connectors/feiqiu/docs/CHANGELOG.md",
|
||||
"apps/etl/connectors/feiqiu/docs/README.md",
|
||||
"apps/etl/connectors/feiqiu/docs/business-rules/dws_metrics.md",
|
||||
"apps/etl/connectors/feiqiu/docs/business-rules/scd2_rules.md",
|
||||
"apps/etl/connectors/feiqiu/docs/etl_tasks/README.md",
|
||||
"apps/etl/connectors/feiqiu/docs/etl_tasks/base_task_mechanism.md",
|
||||
"apps/etl/connectors/feiqiu/docs/etl_tasks/dws_tasks.md",
|
||||
"apps/etl/connectors/feiqiu/docs/etl_tasks/index_tasks.md",
|
||||
"apps/etl/connectors/feiqiu/docs/etl_tasks/ods_tasks.md",
|
||||
"apps/etl/connectors/feiqiu/docs/operations/environment_setup.md",
|
||||
"apps/etl/connectors/feiqiu/docs/operations/troubleshooting.md",
|
||||
"apps/etl/connectors/feiqiu/orchestration/flow_runner.py",
|
||||
"apps/etl/connectors/feiqiu/orchestration/task_executor.py",
|
||||
"apps/etl/connectors/feiqiu/orchestration/task_registry.py",
|
||||
"apps/etl/connectors/feiqiu/orchestration/topological_sort.py",
|
||||
"apps/etl/connectors/feiqiu/quality/consistency_checker.py",
|
||||
"apps/etl/connectors/feiqiu/scripts/verify_dws_extensions.py",
|
||||
"apps/etl/connectors/feiqiu/tasks/dwd/dwd_load_task.py",
|
||||
"apps/etl/connectors/feiqiu/tasks/dws/__init__.py",
|
||||
"apps/etl/connectors/feiqiu/tasks/dws/assistant_daily_task.py",
|
||||
"apps/etl/connectors/feiqiu/tasks/dws/assistant_order_contribution_task.py",
|
||||
"apps/etl/connectors/feiqiu/tasks/dws/member_consumption_task.py",
|
||||
"apps/etl/connectors/feiqiu/tasks/dws/member_visit_task.py",
|
||||
"apps/etl/connectors/feiqiu/tasks/ods/ods_tasks.py",
|
||||
"apps/etl/connectors/feiqiu/tests/unit/test_topological_sort.py",
|
||||
"apps/miniprogram/README.md",
|
||||
"db/README.md",
|
||||
"db/_archived/ddl_baseline_2026-02-22/db/etl_feiqiu/schemas/dwd.sql",
|
||||
"db/_archived/ddl_baseline_2026-02-22/db/etl_feiqiu/schemas/ods.sql",
|
||||
"db/_archived/ddl_baseline_2026-02-22/db/zqyy_app/migrations/2025-02-24__add_fdw_dws_extensions.sql",
|
||||
"db/etl_feiqiu/migrations/2025-02-24__alter_assistant_daily_add_penalty_fields.sql",
|
||||
"db/etl_feiqiu/migrations/2025-02-24__alter_member_consumption_add_recharge_fields.sql",
|
||||
"db/etl_feiqiu/migrations/2025-02-24__create_dws_assistant_order_contribution.sql",
|
||||
"db/etl_feiqiu/migrations/2025-02-24__create_rls_view_assistant_order_contribution.sql",
|
||||
"db/etl_feiqiu/migrations/2026-02-24__add_goods_stock_warning_info.sql",
|
||||
"db/etl_feiqiu/migrations/2026-02-24__cleanup_assistant_abolish_residual.sql",
|
||||
"db/etl_feiqiu/migrations/2026-02-24__p1_create_app_schema_rls_views.sql",
|
||||
"db/etl_feiqiu/seeds/seed_ods_tasks.sql",
|
||||
"db/etl_feiqiu/seeds/seed_scheduler_tasks.sql",
|
||||
"db/zqyy_app/README.md",
|
||||
"db/zqyy_app/migrations/2026-02-24__p1_create_auth_biz_schemas.sql",
|
||||
"db/zqyy_app/migrations/2026-02-24__p1_setup_fdw_etl.sql"
|
||||
],
|
||||
"fingerprint": "96d0946e775eac6698780fe8290e7e73d762b201",
|
||||
"taken_at": "2026-02-26T08:03:18.159857+08:00"
|
||||
}
|
||||
@@ -1,4 +1,4 @@
|
||||
{
|
||||
"prompt_id": "P20260223-225610",
|
||||
"at": "2026-02-23T22:56:10.365734+08:00"
|
||||
"prompt_id": "P20260226-080318",
|
||||
"at": "2026-02-26T08:03:18.159857+08:00"
|
||||
}
|
||||
@@ -4,41 +4,95 @@ description: Run post-change audit + docs sync for NeoZQYY Monorepo; write audit
|
||||
tools: ["read", "write", "shell"]
|
||||
---
|
||||
|
||||
你是专职"审计收口/后处理写入"子代理。你的执行必须尽量不依赖主对话上下文;优先使用本地仓库事实(git、文件内容、prompt_log)完成审计落盘。
|
||||
你是专职"审计收口/后处理写入"子代理。
|
||||
|
||||
## 核心原则:从预构建上下文工作,禁止全盘扫描
|
||||
|
||||
你的唯一输入是 `.kiro/.audit_context.json`(由 `build_audit_context.py` 预构建)。
|
||||
该文件已包含所有你需要的信息:
|
||||
|
||||
| 字段 | 来源 | 内容 |
|
||||
|------|------|------|
|
||||
| `changed_files` | audit-flagger | 全部变更文件列表 |
|
||||
| `high_risk_files` | audit-flagger | 高风险文件子集 |
|
||||
| `reasons` | audit-flagger | 风险分类标签 |
|
||||
| `high_risk_diff` | git diff | 高风险文件的 diff(已截断) |
|
||||
| `diff_stat` | git diff --stat | 变更统计摘要 |
|
||||
| `compliance.code_without_docs` | compliance-prescan | 缺少文档同步的代码文件及其应更新的文档 |
|
||||
| `compliance.new_migration_sql` | compliance-prescan | 新增迁移 SQL 列表 |
|
||||
| `compliance.has_bd_manual` | compliance-prescan | 是否已有 BD_Manual 文档 |
|
||||
| `compliance.has_ddl_baseline` | compliance-prescan | 是否已更新 DDL 基线 |
|
||||
| `external_files` | agent-on-stop | 非 Kiro 操作产生的变更文件(CLI/脚本/手动编辑) |
|
||||
| `prompt_id` / `latest_prompt_log` | prompt-audit-log | Prompt-ID 与原文(溯源用) |
|
||||
|
||||
**禁止操作**:
|
||||
- ❌ 运行 `git status --porcelain`(已有 `changed_files`)
|
||||
- ❌ 运行 `git diff` 全量(已有 `high_risk_diff` + `diff_stat`)
|
||||
- ❌ 遍历目录寻找变更文件(已有分类好的列表)
|
||||
- ❌ 运行 `change_compliance_prescan.py`(已有 `compliance` 数据)
|
||||
|
||||
**允许操作**:
|
||||
- ✅ 读取具体文件内容(如需更新某个 README 时读取其当前内容)
|
||||
- ✅ 对单个文件运行 `git diff HEAD -- <file>`(仅当 context 中 diff 被截断时)
|
||||
- ✅ 连接测试库验证迁移执行状态(仅当 `new_migration_sql` 非空时)
|
||||
|
||||
## 审计产物路径(统一根目录)
|
||||
- 变更审计记录:`docs/audit/changes/<YYYY-MM-DD>__<slug>.md`
|
||||
- 审计一览表:`docs/audit/audit_dashboard.md`(自动生成,勿手动编辑)
|
||||
- Prompt 日志:`docs/audit/prompt_logs/`
|
||||
- 一览表刷新命令:`python scripts/audit/gen_audit_dashboard.py`
|
||||
- 所有审计产物统一写入项目根目录 `docs/audit/`,不要写入子模块(如 `apps/etl/connectors/feiqiu/docs/audit/`)内部
|
||||
|
||||
## 输入来源(不要询问主代理)
|
||||
- 通过 `git status --porcelain` 和 `git diff` 获取本次未提交变更
|
||||
- 通过 `docs/audit/prompt_logs/` 目录下的独立日志文件与 `.kiro/.last_prompt_id.json` 获取最新 Prompt-ID 与 prompt 原文(用于溯源)
|
||||
- 通过项目实际文件内容判断是否"逻辑改动"
|
||||
- 所有审计产物统一写入项目根目录 `docs/audit/`,不要写入子模块内部
|
||||
|
||||
## 何时需要做"重型后处理"
|
||||
满足任一即执行审计收口(否则只输出"无逻辑改动/无需审计",并清除待审计标记):
|
||||
- 改动文件命中 ETL Connector 高风险路径:`apps/etl/connectors/feiqiu/` 下的 `api/`、`cli/`、`config/`、`database/`、`loaders/`、`models/`、`orchestration/`、`scd/`、`tasks/`、`utils/`、`quality/`
|
||||
- 改动文件命中后端 API:`apps/backend/app/`
|
||||
- 改动文件命中管理后台源码:`apps/admin-web/src/`
|
||||
- 改动文件命中小程序源码:`apps/miniprogram/miniapp/`、`apps/miniprogram/miniprogram/`
|
||||
- 改动文件命中共享包:`packages/shared/`
|
||||
- 改动文件命中数据库定义:`db/` 下的 DDL / migration / seed 文件
|
||||
- 根目录散文件(`pyproject.toml`、`.env*` 等)
|
||||
- 发生 DB schema / migration / *.sql / *.prisma 变更
|
||||
- 明确属于业务口径/资金精度舍入/API 契约/鉴权权限/调度游标 等逻辑改变
|
||||
根据 `audit_context.json` 中的 `audit_required` 和 `reasons` 判断:
|
||||
- `audit_required: true` → 执行完整审计流程
|
||||
- `audit_required: false` → 输出"无需审计",清除标记,退出
|
||||
|
||||
## 执行策略(尽量少写、但必须完整)
|
||||
1) 判断是否逻辑改动
|
||||
2) 若是逻辑改动:
|
||||
- 按需调用 skill:
|
||||
- steering-readme-maintainer(同步 product/tech/structure-lite/README)
|
||||
- change-annotation-audit(写 docs/audit/changes/... + AI_CHANGELOG + CHANGE 注释)
|
||||
- bd-manual-db-docs(仅当 DB schema 变更)
|
||||
3) 完成后把 `.kiro/.audit_state.json` 的 `audit_required` 置为 false(或清空 reasons/changed_files/last_reminded_at)
|
||||
4) 执行 `python scripts/audit/gen_audit_dashboard.py` 刷新审计一览表
|
||||
## 执行策略(从 context 驱动,不做冗余扫描)
|
||||
|
||||
### 步骤 1:读取上下文
|
||||
读取 `.kiro/.audit_context.json`,提取关键字段。
|
||||
|
||||
### 步骤 2:审计落盘(按需调用 skill)
|
||||
根据 `reasons` 判断需要哪些 skill:
|
||||
- 含 `dir:backend` / `dir:etl` / `dir:shared` 等 → 调用 `steering-readme-maintainer`
|
||||
- 含任意高风险标签 → 调用 `change-annotation-audit`(写 docs/audit/changes/ + AI_CHANGELOG + CHANGE 注释)
|
||||
- 含 `db-schema-change` → 调用 `bd-manual-db-docs`
|
||||
|
||||
若 `external_files` 非空,在审计记录(`docs/audit/changes/` 文件)中增加「外部变更」段落:
|
||||
- 列出所有外部变更文件路径
|
||||
- 标注来源为"非 Kiro 操作(CLI/脚本/手动编辑)"
|
||||
- 若外部变更涉及高风险路径,额外标注 ⚠️
|
||||
|
||||
### 步骤 3:文档校对补齐
|
||||
遍历 `compliance.code_without_docs`,对每个缺失项:
|
||||
- 读取对应代码文件当前内容(不需要 diff,直接读文件)
|
||||
- 更新对应文档:
|
||||
|
||||
| 代码路径前缀 | 应同步更新的文档 |
|
||||
|---|---|
|
||||
| `apps/backend/app/routers/` | `apps/backend/docs/API-REFERENCE.md` |
|
||||
| `apps/backend/app/services/` | `apps/backend/docs/API-REFERENCE.md` + `apps/backend/README.md` |
|
||||
| `apps/backend/app/auth/` | `apps/backend/docs/API-REFERENCE.md` + `apps/backend/README.md` |
|
||||
| `apps/etl/connectors/feiqiu/tasks/` | `apps/etl/connectors/feiqiu/docs/etl_tasks/` |
|
||||
| `apps/etl/connectors/feiqiu/loaders/` | `apps/etl/connectors/feiqiu/docs/etl_tasks/` |
|
||||
| `apps/etl/connectors/feiqiu/scd/` | `apps/etl/connectors/feiqiu/docs/business-rules/scd2_rules.md` |
|
||||
| `apps/etl/connectors/feiqiu/orchestration/` | `apps/etl/connectors/feiqiu/docs/architecture/` |
|
||||
| `apps/admin-web/src/` | `apps/admin-web/README.md` |
|
||||
| `apps/miniprogram/` | `apps/miniprogram/README.md` |
|
||||
| `packages/shared/` | `packages/shared/README.md` |
|
||||
| `db/*/migrations/*.sql` | `docs/database/BD_Manual_*.md` + `docs/database/ddl/` |
|
||||
|
||||
### 步骤 4:DDL/迁移检查
|
||||
- 若 `compliance.new_migration_sql` 非空:
|
||||
- 连接测试库验证迁移是否已执行
|
||||
- 在审计记录中标注执行状态
|
||||
- 若 `compliance.new_migration_sql` 非空且 `compliance.has_ddl_baseline` 为 false:
|
||||
- 在审计记录中标注 ⚠️ DDL 基线待合并
|
||||
|
||||
### 步骤 5:收尾
|
||||
- 把 `.kiro/.audit_state.json` 的 `audit_required` 置为 false,清空 `reasons`/`changed_files`/`last_reminded_at`
|
||||
- 执行 `python scripts/audit/gen_audit_dashboard.py` 刷新审计一览表
|
||||
|
||||
## 输出(强制极短回执)
|
||||
你最终只允许输出 3 段信息:
|
||||
|
||||
15
.kiro/hooks/agent-on-stop.kiro.hook
Normal file
15
.kiro/hooks/agent-on-stop.kiro.hook
Normal file
@@ -0,0 +1,15 @@
|
||||
{
|
||||
"enabled": true,
|
||||
"name": "Agent On Stop (Merged)",
|
||||
"description": "合并 hook:对话结束时检测变更(含非 Kiro 外部变更)、记录 session log、合规预扫描、构建审计上下文、审计提醒。无变更时跳过。纯 Shell,零 Token。",
|
||||
"version": "1",
|
||||
"when": {
|
||||
"type": "agentStop"
|
||||
},
|
||||
"then": {
|
||||
"type": "runCommand",
|
||||
"command": "python .kiro/scripts/agent_on_stop.py"
|
||||
},
|
||||
"workspaceFolderName": "NeoZQYY",
|
||||
"shortName": "agent-on-stop"
|
||||
}
|
||||
@@ -1,5 +1,5 @@
|
||||
{
|
||||
"enabled": true,
|
||||
"enabled": false,
|
||||
"name": "Audit Flagger (Prompt Submit)",
|
||||
"description": "每次提交 prompt 时,基于 git status 判断是否存在高风险改动;若需要审计则写入 .kiro/.audit_state.json(无 stdout)。",
|
||||
"version": "1",
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
{
|
||||
"enabled": true,
|
||||
"enabled": false,
|
||||
"name": "Audit Reminder (Agent Stop, 15min)",
|
||||
"description": "若检测到高风险改动且未审计,则在 agentStop 以 stderr+非0 形式提醒(15 分钟限频;不写 stdout)。",
|
||||
"version": "1",
|
||||
|
||||
15
.kiro/hooks/change-compliance.kiro.hook
Normal file
15
.kiro/hooks/change-compliance.kiro.hook
Normal file
@@ -0,0 +1,15 @@
|
||||
{
|
||||
"enabled": false,
|
||||
"name": "Change Compliance Check (Agent Stop)",
|
||||
"description": "对话结束时,审查本次变更的合规性:DB 迁移是否已执行、DDL 是否合并至基线、新增文件是否遵循 doc-map、代码修改是否有对应文档/审计记录。先运行预扫描脚本过滤,无需审查时静默跳过以节省 Token。",
|
||||
"version": "1",
|
||||
"when": {
|
||||
"type": "agentStop"
|
||||
},
|
||||
"then": {
|
||||
"type": "askAgent",
|
||||
"prompt": "先运行 `python .kiro/scripts/change_compliance_prescan.py` 获取预扫描结果。\n\n如果输出为 `NO_CHECK_NEEDED`,则回复「✅ 合规检查:无需审查项」,不做任何其他操作。\n\n如果输出为 JSON,则根据以下清单逐项审查并输出简短结论(每项一行,用 ✅/⚠️ 标记):\n\n1. **DB 迁移执行**:检查 `new_migration_sql` 中的 SQL 文件,连接测试库(pg_etl_test / pg_app_test)验证对应表/字段是否已存在。若未执行,标记 ⚠️ 并列出待执行文件。\n2. **DDL 基线合并**:若有迁移 SQL 但 `has_ddl_baseline` 为 false,检查 `docs/database/ddl/` 下对应基线文件是否已更新。\n3. **目录规范**:检查变更文件列表中的新增文件路径是否符合 doc-map 规范(模块专属放模块内、项目级放根目录、审计产物放 docs/audit/)。\n4. **文档同步**:检查 `code_without_docs` 列表,列出缺少对应文档更新的代码文件及其应更新的文档路径。\n\n输出格式极简,不超过 15 行。"
|
||||
},
|
||||
"workspaceFolderName": "NeoZQYY",
|
||||
"shortName": "change-compliance"
|
||||
}
|
||||
@@ -1,5 +1,5 @@
|
||||
{
|
||||
"enabled": true,
|
||||
"enabled": false,
|
||||
"name": "Prompt Audit Log (Shell)",
|
||||
"description": "每次提交 prompt 时,用本地 Shell 在 docs/audit/prompt_logs/ 生成独立日志文件(按时间戳命名);不触发 LLM,避免上下文膨胀。",
|
||||
"version": "3",
|
||||
|
||||
15
.kiro/hooks/prompt-on-submit.kiro.hook
Normal file
15
.kiro/hooks/prompt-on-submit.kiro.hook
Normal file
@@ -0,0 +1,15 @@
|
||||
{
|
||||
"enabled": true,
|
||||
"name": "Prompt On Submit (Merged)",
|
||||
"description": "合并 hook:每次提交 prompt 时执行风险标记 + prompt 日志记录 + git 快照。纯 Shell,零 Token。",
|
||||
"version": "1",
|
||||
"when": {
|
||||
"type": "promptSubmit"
|
||||
},
|
||||
"then": {
|
||||
"type": "runCommand",
|
||||
"command": "python .kiro/scripts/prompt_on_submit.py"
|
||||
},
|
||||
"workspaceFolderName": "NeoZQYY",
|
||||
"shortName": "prompt-on-submit"
|
||||
}
|
||||
@@ -1,14 +1,14 @@
|
||||
{
|
||||
"enabled": true,
|
||||
"name": "Manual: Run /audit (via audit-writer subagent)",
|
||||
"description": "按需触发:启动 audit-writer 子代理执行变更影响审查+文档同步+审计落盘,完成后自动刷新审计一览表,并仅回传极短回执。",
|
||||
"version": "2",
|
||||
"description": "按需触发:读取 agent-on-stop 预构建的审计上下文,启动 audit-writer 子代理执行审计落盘+文档校对。上下文过期时自动重建。",
|
||||
"version": "5",
|
||||
"when": {
|
||||
"type": "userTriggered"
|
||||
},
|
||||
"then": {
|
||||
"type": "askAgent",
|
||||
"prompt": "立刻启动名为 audit-writer 的子代理来执行「后处理写入/审计收口」流程。\n\n约束:\n- 子代理自行使用 git status/diff 与 .kiro/.last_prompt_id.json 中最新 Prompt-ID 作为溯源;不要依赖主对话上下文。\n- 子代理必须按需调用 skill:steering-readme-maintainer、change-annotation-audit、bd-manual-db-docs(仅在满足触发条件时)。\n- 所有审计产物统一写入项目根目录 docs/audit/(变更记录写 docs/audit/changes/,prompt 日志写 docs/audit/prompt_logs/),不要写入子模块内部。\n- 子代理结束后,必须把 .kiro/.audit_state.json 中 audit_required 置为 false(或清空文件),以停止后续提醒。\n- 审计落盘完成后,必须执行 `python scripts/audit/gen_audit_dashboard.py` 刷新审计一览表(docs/audit/audit_dashboard.md)。\n- 你的最终回复必须是「极短回执」,只包含:\n 1) 是否完成(yes/no)\n 2) 写了哪些文件(文件列表)\n 3) 如果失败,下一步怎么做(1~2 条)"
|
||||
"prompt": "执行 /audit 审计流程:\n\n**前置检查**:读取 `.kiro/.audit_context.json`,检查 `built_at` 时间戳。若文件不存在或 `built_at` 超过 30 分钟,先运行 `python .kiro/scripts/agent_on_stop.py` 重建上下文,再重新读取。\n\n**主流程**:启动名为 audit-writer 的子代理,传入以下指令:\n\n> 读取 `.kiro/.audit_context.json` 作为唯一输入,不要自行运行 git status/diff/扫描文件。该文件已包含:变更文件列表、高风险文件 diff、合规检查清单(文档缺失/迁移状态/DDL 基线)、外部变更文件列表(external_files)、Prompt-ID 溯源。按 audit-writer.md 中定义的执行策略完成审计落盘+文档校对补齐。\n\n约束:\n- 子代理禁止重复运行 git status --porcelain 或 git diff 全量扫描,所有信息已在 .audit_context.json 中预备好。\n- 子代理需要读取具体文件内容时(如更新文档),可以直接读取对应文件,但不要做全仓库遍历。\n- 子代理必须按需调用 skill:steering-readme-maintainer、change-annotation-audit、bd-manual-db-docs(仅在满足触发条件时)。\n- 子代理必须根据 compliance.code_without_docs 自动补齐缺失的文档同步。\n- 若 external_files 非空,在审计记录中增加「外部变更」段落,列出这些文件并标注来源为非 Kiro 操作。\n- 所有审计产物统一写入 docs/audit/,不写入子模块内部。\n- 完成后把 .kiro/.audit_state.json 中 audit_required 置为 false。\n- 执行 `python scripts/audit/gen_audit_dashboard.py` 刷新审计一览表。\n- 最终回复必须是极短回执:done/files_written/next_step。"
|
||||
},
|
||||
"workspaceFolderName": "NeoZQYY",
|
||||
"shortName": "audit"
|
||||
|
||||
15
.kiro/hooks/session-log.kiro.hook
Normal file
15
.kiro/hooks/session-log.kiro.hook
Normal file
@@ -0,0 +1,15 @@
|
||||
{
|
||||
"enabled": false,
|
||||
"name": "Session Log (Agent Stop)",
|
||||
"description": "每次对话结束时,记录本次对话的完整日志(用户输入、agent 输出、变更文件、git diff stat)到 docs/audit/session_logs/。纯 Shell 执行,不触发 LLM。",
|
||||
"version": "1",
|
||||
"when": {
|
||||
"type": "agentStop"
|
||||
},
|
||||
"then": {
|
||||
"type": "runCommand",
|
||||
"command": "python .kiro/scripts/session_log.py"
|
||||
},
|
||||
"workspaceFolderName": "NeoZQYY",
|
||||
"shortName": "session-log"
|
||||
}
|
||||
498
.kiro/scripts/agent_on_stop.py
Normal file
498
.kiro/scripts/agent_on_stop.py
Normal file
@@ -0,0 +1,498 @@
|
||||
#!/usr/bin/env python3
|
||||
"""agent_on_stop — agentStop 合并 hook 脚本。
|
||||
|
||||
合并原 audit_reminder + session_log + change_compliance_prescan + build_audit_context:
|
||||
1. 检测变更(对比 promptSubmit 快照,识别非 Kiro 变更)
|
||||
2. 若无任何文件变更 → 跳过所有审查,静默退出
|
||||
3. 记录 session log → docs/audit/session_logs/
|
||||
4. 合规预扫描 → .kiro/.compliance_state.json
|
||||
5. 构建审计上下文 → .kiro/.audit_context.json
|
||||
6. 审计提醒(15 分钟限频)→ stderr
|
||||
|
||||
所有功能块用 try/except 隔离,单个失败不影响其他。
|
||||
"""
|
||||
|
||||
import hashlib
|
||||
import json
|
||||
import os
|
||||
import re
|
||||
import subprocess
|
||||
import sys
|
||||
from datetime import datetime, timezone, timedelta
|
||||
|
||||
TZ_TAIPEI = timezone(timedelta(hours=8))
|
||||
MIN_INTERVAL = timedelta(minutes=15)
|
||||
|
||||
# 路径常量
|
||||
STATE_PATH = os.path.join(".kiro", ".audit_state.json")
|
||||
SNAPSHOT_PATH = os.path.join(".kiro", ".git_snapshot.json")
|
||||
COMPLIANCE_PATH = os.path.join(".kiro", ".compliance_state.json")
|
||||
CONTEXT_PATH = os.path.join(".kiro", ".audit_context.json")
|
||||
PROMPT_ID_PATH = os.path.join(".kiro", ".last_prompt_id.json")
|
||||
SESSION_LOG_DIR = os.path.join("docs", "audit", "session_logs")
|
||||
|
||||
# 噪声路径
|
||||
NOISE_PATTERNS = [
|
||||
re.compile(r"^docs/audit/"),
|
||||
re.compile(r"^\.kiro/"),
|
||||
re.compile(r"^\.hypothesis/"),
|
||||
re.compile(r"^tmp/"),
|
||||
re.compile(r"\.png$"),
|
||||
re.compile(r"\.jpg$"),
|
||||
]
|
||||
|
||||
# 高风险路径
|
||||
HIGH_RISK_PATTERNS = [
|
||||
re.compile(r"^apps/etl/connectors/feiqiu/(api|cli|config|database|loaders|models|orchestration|scd|tasks|utils|quality)/"),
|
||||
re.compile(r"^apps/backend/app/"),
|
||||
re.compile(r"^apps/admin-web/src/"),
|
||||
re.compile(r"^apps/miniprogram/"),
|
||||
re.compile(r"^packages/shared/"),
|
||||
re.compile(r"^db/"),
|
||||
]
|
||||
|
||||
# 文档映射(合规检查用)
|
||||
DOC_MAP = {
|
||||
"apps/backend/app/routers/": ["apps/backend/docs/API-REFERENCE.md"],
|
||||
"apps/backend/app/services/": ["apps/backend/docs/API-REFERENCE.md", "apps/backend/README.md"],
|
||||
"apps/backend/app/auth/": ["apps/backend/docs/API-REFERENCE.md", "apps/backend/README.md"],
|
||||
"apps/etl/connectors/feiqiu/tasks/": ["apps/etl/connectors/feiqiu/docs/etl_tasks/"],
|
||||
"apps/etl/connectors/feiqiu/loaders/": ["apps/etl/connectors/feiqiu/docs/etl_tasks/"],
|
||||
"apps/etl/connectors/feiqiu/scd/": ["apps/etl/connectors/feiqiu/docs/business-rules/scd2_rules.md"],
|
||||
"apps/etl/connectors/feiqiu/orchestration/": ["apps/etl/connectors/feiqiu/docs/architecture/"],
|
||||
"apps/admin-web/src/": ["apps/admin-web/README.md"],
|
||||
"apps/miniprogram/": ["apps/miniprogram/README.md"],
|
||||
"packages/shared/": ["packages/shared/README.md"],
|
||||
}
|
||||
|
||||
MIGRATION_PATTERNS = [
|
||||
re.compile(r"^db/etl_feiqiu/migrations/.*\.sql$"),
|
||||
re.compile(r"^db/zqyy_app/migrations/.*\.sql$"),
|
||||
re.compile(r"^db/fdw/.*\.sql$"),
|
||||
]
|
||||
|
||||
BD_MANUAL_PATTERN = re.compile(r"^docs/database/BD_Manual_.*\.md$")
|
||||
DDL_BASELINE_DIR = "docs/database/ddl/"
|
||||
AUDIT_CHANGES_DIR = "docs/audit/changes/"
|
||||
|
||||
|
||||
def now_taipei():
|
||||
return datetime.now(TZ_TAIPEI)
|
||||
|
||||
|
||||
def sha1hex(s: str) -> str:
|
||||
return hashlib.sha1(s.encode("utf-8")).hexdigest()
|
||||
|
||||
|
||||
def is_noise(f: str) -> bool:
|
||||
return any(p.search(f) for p in NOISE_PATTERNS)
|
||||
|
||||
|
||||
def safe_read_json(path):
|
||||
if not os.path.isfile(path):
|
||||
return {}
|
||||
try:
|
||||
with open(path, "r", encoding="utf-8") as f:
|
||||
return json.load(f)
|
||||
except Exception:
|
||||
return {}
|
||||
|
||||
|
||||
def write_json(path, data):
|
||||
os.makedirs(os.path.dirname(path) or ".kiro", exist_ok=True)
|
||||
with open(path, "w", encoding="utf-8") as f:
|
||||
json.dump(data, f, indent=2, ensure_ascii=False)
|
||||
|
||||
|
||||
def get_changed_files() -> list[str]:
|
||||
try:
|
||||
r = subprocess.run(
|
||||
["git", "status", "--porcelain"],
|
||||
capture_output=True, text=True, encoding="utf-8", errors="replace", timeout=10
|
||||
)
|
||||
if r.returncode != 0:
|
||||
return []
|
||||
except Exception:
|
||||
return []
|
||||
files = []
|
||||
for line in r.stdout.splitlines():
|
||||
if len(line) < 4:
|
||||
continue
|
||||
path = line[3:].strip()
|
||||
if " -> " in path:
|
||||
path = path.split(" -> ")[-1]
|
||||
path = path.strip().strip('"').replace("\\", "/")
|
||||
if path:
|
||||
files.append(path)
|
||||
return sorted(set(files))
|
||||
|
||||
|
||||
def git_diff_stat():
|
||||
try:
|
||||
r = subprocess.run(
|
||||
["git", "diff", "--stat", "HEAD"],
|
||||
capture_output=True, text=True, encoding="utf-8", errors="replace", timeout=15
|
||||
)
|
||||
return r.stdout.strip() if r.returncode == 0 else ""
|
||||
except Exception:
|
||||
return ""
|
||||
|
||||
|
||||
def git_diff_files(files, max_total=30000):
|
||||
if not files:
|
||||
return ""
|
||||
all_diff = []
|
||||
total_len = 0
|
||||
for f in files:
|
||||
if total_len >= max_total:
|
||||
all_diff.append(f"\n[TRUNCATED: diff exceeds {max_total // 1000}KB]")
|
||||
break
|
||||
try:
|
||||
r = subprocess.run(
|
||||
["git", "diff", "HEAD", "--", f],
|
||||
capture_output=True, text=True, encoding="utf-8", errors="replace", timeout=10
|
||||
)
|
||||
if r.returncode == 0 and r.stdout.strip():
|
||||
chunk = r.stdout.strip()
|
||||
if len(chunk) > 5000:
|
||||
chunk = chunk[:5000] + f"\n[TRUNCATED: {f} diff too long]"
|
||||
all_diff.append(chunk)
|
||||
total_len += len(chunk)
|
||||
except Exception:
|
||||
continue
|
||||
return "\n".join(all_diff)
|
||||
|
||||
|
||||
def get_latest_prompt_log():
|
||||
log_dir = os.path.join("docs", "audit", "prompt_logs")
|
||||
if not os.path.isdir(log_dir):
|
||||
return ""
|
||||
try:
|
||||
files = sorted(
|
||||
[f for f in os.listdir(log_dir) if f.startswith("prompt_log_")],
|
||||
reverse=True
|
||||
)
|
||||
if not files:
|
||||
return ""
|
||||
with open(os.path.join(log_dir, files[0]), "r", encoding="utf-8") as f:
|
||||
content = f.read()
|
||||
return content[:3000] + "\n[TRUNCATED]" if len(content) > 3000 else content
|
||||
except Exception:
|
||||
return ""
|
||||
|
||||
|
||||
# ── 步骤 1:检测变更,识别非 Kiro 变更 ──
|
||||
def detect_changes(current_files):
|
||||
"""对比 promptSubmit 快照,返回 (real_files, external_files, no_change)"""
|
||||
snapshot = safe_read_json(SNAPSHOT_PATH)
|
||||
snapshot_files = set(snapshot.get("files", []))
|
||||
current_set = set(current_files)
|
||||
|
||||
# 排除噪声后的真实变更
|
||||
real_files = sorted(f for f in current_files if not is_noise(f))
|
||||
|
||||
if not real_files:
|
||||
return [], [], True
|
||||
|
||||
# 检测非 Kiro 变更:在 agentStop 时出现但 promptSubmit 快照中没有的文件
|
||||
# 这些是对话期间由外部操作(CLI、脚本等)产生的变更
|
||||
new_since_submit = current_set - snapshot_files
|
||||
external_files = sorted(f for f in new_since_submit if not is_noise(f))
|
||||
|
||||
return real_files, external_files, False
|
||||
|
||||
|
||||
# ── Step 2: Session Log ──
def do_session_log(now, changed_files, external_files):
    """Write a markdown session log capturing prompt, agent output and git state.

    now            -- aware datetime used for the filename and header
    changed_files  -- real (non-noise) changed paths for "Changed Files"
    external_files -- paths that appeared outside Kiro during this conversation

    Output file: SESSION_LOG_DIR/session_<timestamp>.md
    """
    # The conversation payload is injected by Kiro via environment variables.
    agent_output = os.environ.get("AGENT_OUTPUT", "")
    user_prompt = os.environ.get("USER_PROMPT", "")
    prompt_info = safe_read_json(PROMPT_ID_PATH)
    audit_state = safe_read_json(STATE_PATH)
    prompt_id = prompt_info.get("prompt_id", "unknown")

    # Cap both payloads so a single session cannot produce a huge log file.
    max_len = 50000
    if len(agent_output) > max_len:
        agent_output = agent_output[:max_len] + "\n\n[TRUNCATED: output exceeds 50KB]"
    if len(user_prompt) > 10000:
        user_prompt = user_prompt[:10000] + "\n\n[TRUNCATED: prompt exceeds 10KB]"

    diff_stat = git_diff_stat()
    git_status = ""
    try:
        r = subprocess.run(
            ["git", "status", "--short"],
            capture_output=True, text=True, encoding="utf-8", errors="replace", timeout=10
        )
        git_status = r.stdout.strip() if r.returncode == 0 else ""
    except Exception:
        # Best-effort: a failing git call leaves the status section empty.
        pass

    os.makedirs(SESSION_LOG_DIR, exist_ok=True)
    filename = f"session_{now.strftime('%Y%m%d_%H%M%S')}.md"

    # Optional section flagging non-Kiro (external) changes.
    external_section = ""
    if external_files:
        ext_list = "\n".join(external_files[:30])
        external_section = f"""
## External Changes (non-Kiro, {len(external_files)} files)

以下文件在本次对话期间由外部操作(CLI/脚本/手动编辑)产生:

```
{ext_list}
```
"""

    content = f"""# Session Log — {now.strftime('%Y-%m-%d %H:%M:%S %z')}

- Prompt-ID: `{prompt_id}`
- Audit Required: `{audit_state.get('audit_required', 'N/A')}`
- Reasons: {', '.join(audit_state.get('reasons', [])) or 'none'}
- External Changes: {len(external_files)} files

## User Input

```text
{user_prompt or '(not captured)'}
```

## Agent Output

```text
{agent_output or '(not captured)'}
```

## Changed Files ({len(changed_files)})

```
{chr(10).join(changed_files[:80]) if changed_files else '(none)'}
```
{external_section}
## Git Diff Stat

```
{diff_stat}
```

## Git Status

```
{git_status or '(clean)'}
```
"""
    with open(os.path.join(SESSION_LOG_DIR, filename), "w", encoding="utf-8") as f:
        f.write(content)
|
||||
|
||||
|
||||
# ── Step 3: Compliance prescan ──
def do_compliance_prescan(all_files):
    """Classify changed paths into a compliance checklist and persist it.

    all_files -- every changed path; noise entries are skipped internally.
    Writes the checklist plus a needs_check flag to COMPLIANCE_PATH and
    returns the raw checklist dict.
    """
    result = {
        "new_migration_sql": [],    # migration SQL files touched
        "new_or_modified_sql": [],  # every .sql change
        "code_without_docs": [],    # code changed without its paired docs
        "new_files": [],            # NOTE(review): never populated here — confirm intent
        "has_bd_manual": False,     # any BD_Manual doc changed
        "has_audit_record": False,  # any audit record changed
        "has_ddl_baseline": False,  # any DDL baseline changed
    }

    code_files = []
    doc_files = set()

    for f in all_files:
        if is_noise(f):
            continue
        # Migration SQL: first matching pattern wins.
        for mp in MIGRATION_PATTERNS:
            if mp.search(f):
                result["new_migration_sql"].append(f)
                break
        if f.endswith(".sql"):
            result["new_or_modified_sql"].append(f)
        if BD_MANUAL_PATTERN.search(f):
            result["has_bd_manual"] = True
        if f.startswith(AUDIT_CHANGES_DIR):
            result["has_audit_record"] = True
        if f.startswith(DDL_BASELINE_DIR):
            result["has_ddl_baseline"] = True
        # Anything markdown or under a docs/ directory counts as documentation.
        if f.endswith(".md") or "/docs/" in f:
            doc_files.add(f)
        if f.endswith((".py", ".ts", ".tsx", ".js", ".jsx")):
            code_files.append(f)

    # Flag code files whose mapped documentation saw no matching change.
    for cf in code_files:
        expected_docs = []
        for prefix, docs in DOC_MAP.items():
            if cf.startswith(prefix):
                expected_docs.extend(docs)
        if expected_docs:
            has_doc = False
            for ed in expected_docs:
                if ed in doc_files:
                    has_doc = True
                    break
                # Entries ending in "/" match any doc under that directory.
                if ed.endswith("/") and any(d.startswith(ed) for d in doc_files):
                    has_doc = True
                    break
            if not has_doc:
                result["code_without_docs"].append({
                    "file": cf,
                    "expected_docs": expected_docs,
                })

    # A review is needed only for new migrations or undocumented code changes.
    needs_check = bool(
        result["new_migration_sql"]
        or result["code_without_docs"]
    )

    now = now_taipei()
    write_json(COMPLIANCE_PATH, {
        "needs_check": needs_check,
        "scanned_at": now.isoformat(),
        **result,
    })
    return result
|
||||
|
||||
|
||||
# ── Step 4: Build audit context ──
def do_build_audit_context(all_files, external_files, compliance):
    """Assemble the unified audit context and write it to CONTEXT_PATH.

    all_files      -- fallback changed-file list (first 50) when the audit
                      state carries none
    external_files -- non-Kiro changes detected by detect_changes()
    compliance     -- checklist dict returned by do_compliance_prescan()
    """
    now = now_taipei()
    audit_state = safe_read_json(STATE_PATH)
    prompt_info = safe_read_json(PROMPT_ID_PATH)

    changed_files = audit_state.get("changed_files", all_files[:50])
    # Only high-risk paths get their full diff captured (keeps context small).
    high_risk_files = [
        f for f in changed_files
        if any(p.search(f) for p in HIGH_RISK_PATTERNS)
    ]

    diff_stat = git_diff_stat()
    high_risk_diff = git_diff_files(high_risk_files)
    prompt_log = get_latest_prompt_log()

    context = {
        "built_at": now.isoformat(),
        "prompt_id": prompt_info.get("prompt_id", "unknown"),
        "prompt_at": prompt_info.get("at", ""),
        "audit_required": audit_state.get("audit_required", False),
        "db_docs_required": audit_state.get("db_docs_required", False),
        "reasons": audit_state.get("reasons", []),
        "changed_files": changed_files,
        "high_risk_files": high_risk_files,
        "external_files": external_files,
        "compliance": {
            "code_without_docs": compliance.get("code_without_docs", []),
            "new_migration_sql": compliance.get("new_migration_sql", []),
            "has_bd_manual": compliance.get("has_bd_manual", False),
            "has_audit_record": compliance.get("has_audit_record", False),
            "has_ddl_baseline": compliance.get("has_ddl_baseline", False),
        },
        "diff_stat": diff_stat,
        "high_risk_diff": high_risk_diff,
        "latest_prompt_log": prompt_log,
    }

    write_json(CONTEXT_PATH, context)
|
||||
|
||||
|
||||
# ── Step 5: Audit reminder (15-minute rate limit) ──
def do_audit_reminder(real_files):
    """Emit a rate-limited audit reminder on stderr; exits 1 when reminding.

    real_files -- non-noise changed paths; an empty list clears the pending
    audit flag instead of reminding.
    """
    state = safe_read_json(STATE_PATH)
    if not state.get("audit_required"):
        return

    # Clear the pending-audit flag when the work tree is clean.
    if not real_files:
        state["audit_required"] = False
        state["reasons"] = []
        state["changed_files"] = []
        state["last_reminded_at"] = None
        write_json(STATE_PATH, state)
        return

    now = now_taipei()
    last_str = state.get("last_reminded_at")
    if last_str:
        try:
            last = datetime.fromisoformat(last_str)
            # Rate limit: stay silent inside the minimum reminder interval.
            if (now - last) < MIN_INTERVAL:
                return
        except Exception:
            # Unparseable timestamp -> treat as never reminded.
            pass

    state["last_reminded_at"] = now.isoformat()
    write_json(STATE_PATH, state)

    reasons = state.get("reasons", [])
    reason_text = ", ".join(reasons) if reasons else "high-risk paths changed"
    ext_note = ""
    # Read the external-change count from the audit context snapshot.
    ctx = safe_read_json(CONTEXT_PATH)
    ext_count = len(ctx.get("external_files", []))
    if ext_count:
        ext_note = f" (includes {ext_count} external/non-Kiro changes)"

    sys.stderr.write(
        f"[AUDIT REMINDER] Pending audit ({reason_text}){ext_note}. "
        f"Run /audit to sync. (15min rate limit)\n"
    )
    # Non-zero exit signals the hook runner that a reminder fired.
    sys.exit(1)
|
||||
|
||||
|
||||
def main():
    """agentStop entry point: log the session, prescan, build context, remind.

    Every step is isolated in try/except so one failure cannot block the
    others; only the final reminder step may exit non-zero.
    """
    # Exit immediately when not inside a git repository.
    try:
        r = subprocess.run(
            ["git", "rev-parse", "--is-inside-work-tree"],
            capture_output=True, text=True, encoding="utf-8", errors="replace", timeout=5
        )
        if r.returncode != 0:
            return
    except Exception:
        return

    now = now_taipei()
    current_files = get_changed_files()

    # Step 1: detect changes.
    real_files, external_files, no_change = detect_changes(current_files)

    # No file changes at all -> skip every review step.
    if no_change:
        return

    # Step 2: session log (always recorded, external changes included).
    try:
        do_session_log(now, real_files, external_files)
    except Exception:
        pass

    # Step 3: compliance prescan.
    compliance = {}
    try:
        compliance = do_compliance_prescan(current_files)
    except Exception:
        pass

    # Step 4: build the audit context consumed later by /audit.
    try:
        do_build_audit_context(current_files, external_files, compliance)
    except Exception:
        pass

    # Step 5: audit reminder (runs last; may sys.exit(1)).
    try:
        do_audit_reminder(real_files)
    except SystemExit:
        raise
    except Exception:
        pass
|
||||
|
||||
|
||||
if __name__ == "__main__":
    # Never let this hook crash the editor: swallow unexpected errors but
    # propagate deliberate exit codes (the audit reminder's sys.exit(1)).
    try:
        main()
    except SystemExit as e:
        sys.exit(e.code)
    except Exception:
        pass
|
||||
174
.kiro/scripts/build_audit_context.py
Normal file
174
.kiro/scripts/build_audit_context.py
Normal file
@@ -0,0 +1,174 @@
|
||||
#!/usr/bin/env python3
|
||||
"""build_audit_context — 合并所有前置 hook 产出,生成统一审计上下文快照。
|
||||
|
||||
读取:
|
||||
- .kiro/.audit_state.json(audit-flagger 产出:风险判定、变更文件列表)
|
||||
- .kiro/.compliance_state.json(change-compliance 产出:文档缺失、迁移状态)
|
||||
- .kiro/.last_prompt_id.json(prompt-audit-log 产出:Prompt ID 溯源)
|
||||
- git diff --stat HEAD(变更统计摘要)
|
||||
- git diff HEAD(仅高风险文件的 diff,截断到合理长度)
|
||||
|
||||
输出:.kiro/.audit_context.json(audit-writer 子代理的唯一输入)
|
||||
"""
|
||||
|
||||
import json
|
||||
import os
|
||||
import re
|
||||
import subprocess
|
||||
import sys
|
||||
from datetime import datetime, timezone, timedelta
|
||||
|
||||
TZ_TAIPEI = timezone(timedelta(hours=8))
|
||||
CONTEXT_PATH = os.path.join(".kiro", ".audit_context.json")
|
||||
|
||||
# 高风险路径(只对这些文件取 diff,避免 diff 过大)
|
||||
HIGH_RISK_PATTERNS = [
|
||||
re.compile(r"^apps/etl/connectors/feiqiu/(api|cli|config|database|loaders|models|orchestration|scd|tasks|utils|quality)/"),
|
||||
re.compile(r"^apps/backend/app/"),
|
||||
re.compile(r"^apps/admin-web/src/"),
|
||||
re.compile(r"^apps/miniprogram/"),
|
||||
re.compile(r"^packages/shared/"),
|
||||
re.compile(r"^db/"),
|
||||
]
|
||||
|
||||
|
||||
def safe_read_json(path):
    """Parse the JSON file at *path*; a missing or broken file yields {}."""
    try:
        with open(path, "r", encoding="utf-8") as handle:
            return json.load(handle)
    except Exception:
        return {}
|
||||
|
||||
|
||||
def git_diff_stat():
    """Return `git diff --stat HEAD` output, or "" on any failure."""
    try:
        proc = subprocess.run(
            ["git", "diff", "--stat", "HEAD"],
            capture_output=True, text=True, encoding="utf-8",
            errors="replace", timeout=15,
        )
    except Exception:
        return ""
    if proc.returncode != 0:
        return ""
    return proc.stdout.strip()
|
||||
|
||||
|
||||
def git_diff_files(files, max_total=30000):
    """Collect per-file `git diff HEAD` output, capped at *max_total* chars.

    Each individual file's diff is clipped to 5000 chars; once the running
    total reaches max_total a truncation marker is appended and collection
    stops. Files whose diff fails or is empty are silently skipped.
    """
    if not files:
        return ""
    pieces = []
    accumulated = 0
    for path in files:
        if accumulated >= max_total:
            pieces.append(f"\n[TRUNCATED: diff exceeds {max_total // 1000}KB limit]")
            break
        # One git invocation per file keeps the command line short.
        try:
            proc = subprocess.run(
                ["git", "diff", "HEAD", "--", path],
                capture_output=True, text=True, encoding="utf-8",
                errors="replace", timeout=10,
            )
        except Exception:
            continue
        if proc.returncode != 0:
            continue
        piece = proc.stdout.strip()
        if not piece:
            continue
        if len(piece) > 5000:
            piece = piece[:5000] + f"\n[TRUNCATED: {path} diff too long]"
        pieces.append(piece)
        accumulated += len(piece)
    return "\n".join(pieces)
|
||||
|
||||
|
||||
def get_latest_prompt_log():
    """Return the newest prompt log's text for traceability, "" when absent.

    Looks under docs/audit/prompt_logs for prompt_log_* files and returns
    the lexicographically newest one, truncated to 3000 characters.
    """
    log_dir = os.path.join("docs", "audit", "prompt_logs")
    if not os.path.isdir(log_dir):
        return ""
    try:
        names = [n for n in os.listdir(log_dir) if n.startswith("prompt_log_")]
        if not names:
            return ""
        # Timestamped names sort lexicographically; max() picks the newest.
        with open(os.path.join(log_dir, max(names)), "r", encoding="utf-8") as fh:
            text = fh.read()
        return text if len(text) <= 3000 else text[:3000] + "\n[TRUNCATED]"
    except Exception:
        return ""
|
||||
|
||||
|
||||
def main():
    """Merge upstream hook outputs and git summaries into the audit context.

    Reads .kiro/.audit_state.json, .kiro/.compliance_state.json and
    .kiro/.last_prompt_id.json, adds git diff summaries (full diffs only
    for high-risk files) plus the latest prompt log, and writes the merged
    snapshot to CONTEXT_PATH for the audit-writer sub-agent.
    """
    now = datetime.now(TZ_TAIPEI)

    # Read the artifacts produced by the upstream hooks.
    audit_state = safe_read_json(os.path.join(".kiro", ".audit_state.json"))
    compliance = safe_read_json(os.path.join(".kiro", ".compliance_state.json"))
    prompt_id_info = safe_read_json(os.path.join(".kiro", ".last_prompt_id.json"))

    # Extract the high-risk subset of changed files from the audit state.
    changed_files = audit_state.get("changed_files", [])
    high_risk_files = [
        f for f in changed_files
        if any(p.search(f) for p in HIGH_RISK_PATTERNS)
    ]

    # Git summaries: stat for everything, full diff only for high-risk files.
    diff_stat = git_diff_stat()
    high_risk_diff = git_diff_files(high_risk_files)

    # Latest prompt log for traceability.
    prompt_log = get_latest_prompt_log()

    # Assemble the unified context.
    context = {
        "built_at": now.isoformat(),
        "prompt_id": prompt_id_info.get("prompt_id", "unknown"),
        "prompt_at": prompt_id_info.get("at", ""),

        # From audit-flagger
        "audit_required": audit_state.get("audit_required", False),
        "db_docs_required": audit_state.get("db_docs_required", False),
        "reasons": audit_state.get("reasons", []),
        "changed_files": changed_files,
        "high_risk_files": high_risk_files,

        # From change-compliance-prescan
        "compliance": {
            "code_without_docs": compliance.get("code_without_docs", []),
            "new_migration_sql": compliance.get("new_migration_sql", []),
            "has_bd_manual": compliance.get("has_bd_manual", False),
            "has_audit_record": compliance.get("has_audit_record", False),
            "has_ddl_baseline": compliance.get("has_ddl_baseline", False),
        },

        # git summaries
        "diff_stat": diff_stat,
        "high_risk_diff": high_risk_diff,

        # prompt traceability
        "latest_prompt_log": prompt_log,
    }

    os.makedirs(".kiro", exist_ok=True)
    with open(CONTEXT_PATH, "w", encoding="utf-8") as f:
        json.dump(context, f, indent=2, ensure_ascii=False)

    # One-line summary on stdout for the hook runner.
    print(f"audit_context built: {len(changed_files)} files, "
          f"{len(high_risk_files)} high-risk, "
          f"{len(compliance.get('code_without_docs', []))} docs missing")
|
||||
|
||||
|
||||
if __name__ == "__main__":
    try:
        main()
    except Exception as e:
        # Surface the failure to the hook runner; non-zero exit marks the
        # context-build step as failed.
        sys.stderr.write(f"build_audit_context failed: {e}\n")
        sys.exit(1)
|
||||
243
.kiro/scripts/change_compliance_prescan.py
Normal file
243
.kiro/scripts/change_compliance_prescan.py
Normal file
@@ -0,0 +1,243 @@
|
||||
#!/usr/bin/env python3
|
||||
"""change_compliance_prescan — 预扫描变更文件,输出需要合规审查的项目。
|
||||
|
||||
在 agentStop 时由 askAgent hook 调用,为 LLM 提供精简的审查清单,
|
||||
避免 LLM 自行扫描文件浪费 Token。
|
||||
|
||||
输出到 stdout(供 askAgent 读取):
|
||||
- 若无需审查:输出 "NO_CHECK_NEEDED"
|
||||
- 若需审查:输出结构化 JSON 清单
|
||||
"""
|
||||
|
||||
import json
|
||||
import os
|
||||
import re
|
||||
import subprocess
|
||||
import sys
|
||||
from datetime import datetime, timezone, timedelta
|
||||
|
||||
TZ_TAIPEI = timezone(timedelta(hours=8))
|
||||
STATE_PATH = os.path.join(".kiro", ".audit_state.json")
|
||||
|
||||
# doc-map 中定义的文档对应关系
|
||||
DOC_MAP = {
|
||||
# 代码路径前缀 → 应同步更新的文档
|
||||
"apps/backend/app/routers/": ["apps/backend/docs/API-REFERENCE.md"],
|
||||
"apps/backend/app/services/": ["apps/backend/docs/API-REFERENCE.md", "apps/backend/README.md"],
|
||||
"apps/backend/app/auth/": ["apps/backend/docs/API-REFERENCE.md", "apps/backend/README.md"],
|
||||
"apps/etl/connectors/feiqiu/tasks/": ["apps/etl/connectors/feiqiu/docs/etl_tasks/"],
|
||||
"apps/etl/connectors/feiqiu/loaders/": ["apps/etl/connectors/feiqiu/docs/etl_tasks/"],
|
||||
"apps/etl/connectors/feiqiu/scd/": ["apps/etl/connectors/feiqiu/docs/business-rules/scd2_rules.md"],
|
||||
"apps/etl/connectors/feiqiu/orchestration/": ["apps/etl/connectors/feiqiu/docs/architecture/"],
|
||||
"apps/admin-web/src/": ["apps/admin-web/README.md"],
|
||||
"apps/miniprogram/": ["apps/miniprogram/README.md"],
|
||||
"packages/shared/": ["packages/shared/README.md"],
|
||||
}
|
||||
|
||||
# DDL 基线文件(doc-map 中定义)
|
||||
DDL_BASELINE_DIR = "docs/database/ddl/"
|
||||
|
||||
# 迁移脚本路径
|
||||
MIGRATION_PATTERNS = [
|
||||
re.compile(r"^db/etl_feiqiu/migrations/.*\.sql$"),
|
||||
re.compile(r"^db/zqyy_app/migrations/.*\.sql$"),
|
||||
re.compile(r"^db/fdw/.*\.sql$"),
|
||||
]
|
||||
|
||||
# DB 文档路径
|
||||
BD_MANUAL_PATTERN = re.compile(r"^docs/database/BD_Manual_.*\.md$")
|
||||
|
||||
# 审计记录路径
|
||||
AUDIT_CHANGES_DIR = "docs/audit/changes/"
|
||||
|
||||
# 噪声路径(不参与合规检查)
|
||||
NOISE = [
|
||||
re.compile(r"^docs/audit/"),
|
||||
re.compile(r"^\.kiro/"),
|
||||
re.compile(r"^\.hypothesis/"),
|
||||
re.compile(r"^tmp/"),
|
||||
re.compile(r"\.png$"),
|
||||
re.compile(r"\.jpg$"),
|
||||
]
|
||||
|
||||
|
||||
def safe_read_json(path):
    """Read a JSON file, returning {} on any problem (missing or corrupt)."""
    if not os.path.isfile(path):
        return {}
    try:
        with open(path, "r", encoding="utf-8") as src:
            parsed = json.load(src)
        return parsed
    except Exception:
        return {}
|
||||
|
||||
|
||||
def get_changed_files():
    """Changed files from the audit state, falling back to `git status`.

    Prefers the cached list in STATE_PATH; otherwise parses
    `git status --porcelain`, keeping rename targets, normalising
    backslashes, and returning a sorted de-duplicated list.
    """
    cached = safe_read_json(STATE_PATH).get("changed_files", [])
    if cached:
        return cached
    # Fall back to parsing porcelain output directly.
    try:
        proc = subprocess.run(
            ["git", "status", "--porcelain"],
            capture_output=True, text=True, timeout=10
        )
    except Exception:
        return []
    if proc.returncode != 0:
        return []
    paths = []
    for raw in proc.stdout.splitlines():
        if len(raw) < 4:
            continue
        # Skip the two status columns plus separator, then normalise.
        candidate = raw[3:].strip().strip('"').replace("\\", "/")
        if " -> " in candidate:
            candidate = candidate.split(" -> ")[-1]
        if candidate:
            paths.append(candidate)
    return sorted(set(paths))
|
||||
|
||||
|
||||
def is_noise(f):
    """True when *f* matches one of the NOISE path patterns."""
    for pattern in NOISE:
        if pattern.search(f):
            return True
    return False
|
||||
|
||||
|
||||
def classify_files(files):
    """Bucket changed files into a compliance-review checklist.

    files -- changed paths; noise entries are skipped internally.
    Returns a dict with migration/SQL lists, code files missing their
    paired documentation updates, and booleans for BD_Manual /
    audit-record / DDL-baseline changes.
    """
    result = {
        "new_migration_sql": [],    # newly added migration SQL
        "new_or_modified_sql": [],  # every SQL change
        "code_without_docs": [],    # code changed without matching doc change
        "new_files": [],            # NOTE(review): never populated here — confirm intent
        "has_bd_manual": False,     # any BD_Manual doc changed
        "has_audit_record": False,  # any audit record changed
        "has_ddl_baseline": False,  # any DDL baseline changed
    }

    code_files = []
    doc_files = set()

    for f in files:
        if is_noise(f):
            continue

        # Migration SQL (first matching pattern wins)
        for mp in MIGRATION_PATTERNS:
            if mp.search(f):
                result["new_migration_sql"].append(f)
                break

        # Any SQL file
        if f.endswith(".sql"):
            result["new_or_modified_sql"].append(f)

        # BD_Manual documentation
        if BD_MANUAL_PATTERN.search(f):
            result["has_bd_manual"] = True

        # Audit record
        if f.startswith(AUDIT_CHANGES_DIR):
            result["has_audit_record"] = True

        # DDL baseline
        if f.startswith(DDL_BASELINE_DIR):
            result["has_ddl_baseline"] = True

        # Documentation file
        if f.endswith(".md") or "/docs/" in f:
            doc_files.add(f)

        # Code file (not docs, not config)
        if f.endswith((".py", ".ts", ".tsx", ".js", ".jsx")):
            code_files.append(f)

    # Check each code file for a matching documentation change.
    for cf in code_files:
        expected_docs = []
        for prefix, docs in DOC_MAP.items():
            if cf.startswith(prefix):
                expected_docs.extend(docs)
        if expected_docs:
            # Any one of the expected docs present in the change set suffices.
            has_doc = False
            for ed in expected_docs:
                if ed in doc_files:
                    has_doc = True
                    break
                # Directory-level match: entries ending in "/" cover subtrees.
                if ed.endswith("/"):
                    if any(d.startswith(ed) for d in doc_files):
                        has_doc = True
                        break
            if not has_doc:
                result["code_without_docs"].append({
                    "file": cf,
                    "expected_docs": expected_docs,
                })

    return result
|
||||
|
||||
|
||||
COMPLIANCE_STATE_PATH = os.path.join(".kiro", ".compliance_state.json")


def save_compliance_state(result, needs_check):
    """Persist the compliance scan result for the audit-writer sub-agent.

    result      -- checklist dict from classify_files()
    needs_check -- whether an LLM review is required
    """
    os.makedirs(".kiro", exist_ok=True)
    snapshot = {
        "needs_check": needs_check,
        "scanned_at": datetime.now(TZ_TAIPEI).isoformat(),
        **result,
    }
    with open(COMPLIANCE_STATE_PATH, "w", encoding="utf-8") as handle:
        json.dump(snapshot, handle, indent=2, ensure_ascii=False)
|
||||
|
||||
|
||||
def main():
    """Scan changed files; print NO_CHECK_NEEDED or a JSON review checklist.

    Always persists the scan result to the compliance state file so the
    audit-writer sub-agent can read it later.
    """
    files = get_changed_files()
    if not files:
        save_compliance_state({"new_migration_sql": [], "new_or_modified_sql": [],
                               "code_without_docs": [], "new_files": [],
                               "has_bd_manual": False, "has_audit_record": False,
                               "has_ddl_baseline": False}, False)
        print("NO_CHECK_NEEDED")
        return

    # Filter out noise paths; bail when nothing real changed.
    real_files = [f for f in files if not is_noise(f)]
    if not real_files:
        save_compliance_state({"new_migration_sql": [], "new_or_modified_sql": [],
                               "code_without_docs": [], "new_files": [],
                               "has_bd_manual": False, "has_audit_record": False,
                               "has_ddl_baseline": False}, False)
        print("NO_CHECK_NEEDED")
        return

    result = classify_files(files)

    # Decide whether an LLM review is needed.
    # NOTE(review): the third clause is redundant — it can only be truthy
    # when the first clause already is. Confirm the intended condition.
    needs_check = (
        result["new_migration_sql"]
        or result["code_without_docs"]
        or (result["new_migration_sql"] and not result["has_ddl_baseline"])
    )

    # Always persist the result.
    save_compliance_state(result, needs_check)

    if not needs_check:
        print("NO_CHECK_NEEDED")
        return

    # Emit a compact JSON checklist for the LLM reviewer.
    print(json.dumps(result, indent=2, ensure_ascii=False))
|
||||
|
||||
|
||||
if __name__ == "__main__":
    try:
        main()
    except Exception:
        # Fix: the exception was bound to an unused variable (`as e`, F841).
        # On any unexpected failure, don't block the hook pipeline: report
        # "nothing to check" and exit normally.
        print("NO_CHECK_NEEDED")
|
||||
233
.kiro/scripts/prompt_on_submit.py
Normal file
233
.kiro/scripts/prompt_on_submit.py
Normal file
@@ -0,0 +1,233 @@
|
||||
#!/usr/bin/env python3
|
||||
"""prompt_on_submit — promptSubmit 合并 hook 脚本。
|
||||
|
||||
合并原 audit_flagger + prompt_audit_log 的功能:
|
||||
1. git status → 风险判定 → 写 .kiro/.audit_state.json
|
||||
2. 记录 prompt 日志 → docs/audit/prompt_logs/
|
||||
3. 记录当前 git fingerprint 快照 → .kiro/.git_snapshot.json(供 agentStop 对比)
|
||||
|
||||
所有功能块用 try/except 隔离,单个失败不影响其他。
|
||||
"""
|
||||
|
||||
import hashlib
|
||||
import json
|
||||
import os
|
||||
import re
|
||||
import subprocess
|
||||
import sys
|
||||
from datetime import datetime, timezone, timedelta
|
||||
|
||||
TZ_TAIPEI = timezone(timedelta(hours=8))
|
||||
|
||||
# ── 风险规则(来自 audit_flagger) ──
|
||||
RISK_RULES = [
|
||||
(re.compile(r"^apps/etl/connectors/feiqiu/(api|cli|config|database|loaders|models|orchestration|scd|tasks|utils|quality)/"), "etl"),
|
||||
(re.compile(r"^apps/backend/app/"), "backend"),
|
||||
(re.compile(r"^apps/admin-web/src/"), "admin-web"),
|
||||
(re.compile(r"^apps/miniprogram/(miniapp|miniprogram)/"), "miniprogram"),
|
||||
(re.compile(r"^packages/shared/"), "shared"),
|
||||
(re.compile(r"^db/"), "db"),
|
||||
]
|
||||
|
||||
NOISE_PATTERNS = [
|
||||
re.compile(r"^docs/audit/"),
|
||||
re.compile(r"^\.kiro/"),
|
||||
re.compile(r"^tmp/"),
|
||||
re.compile(r"^\.hypothesis/"),
|
||||
]
|
||||
|
||||
DB_PATTERNS = [
|
||||
re.compile(r"^db/"),
|
||||
re.compile(r"/migrations/"),
|
||||
re.compile(r"\.sql$"),
|
||||
re.compile(r"\.prisma$"),
|
||||
]
|
||||
|
||||
STATE_PATH = os.path.join(".kiro", ".audit_state.json")
|
||||
SNAPSHOT_PATH = os.path.join(".kiro", ".git_snapshot.json")
|
||||
PROMPT_ID_PATH = os.path.join(".kiro", ".last_prompt_id.json")
|
||||
|
||||
|
||||
def now_taipei():
    """Current time as an aware datetime in UTC+8 (TZ_TAIPEI)."""
    return datetime.now(TZ_TAIPEI)
|
||||
|
||||
|
||||
def sha1hex(s: str) -> str:
    """Hex SHA-1 digest of the UTF-8 encoding of *s*."""
    digest = hashlib.sha1()
    digest.update(s.encode("utf-8"))
    return digest.hexdigest()
|
||||
|
||||
|
||||
def get_changed_files() -> list[str]:
    """Paths from `git status --porcelain`, rename targets kept, git order.

    Backslashes are normalised to forward slashes and surrounding quotes
    stripped; returns [] when git fails or is unavailable.
    """
    try:
        proc = subprocess.run(
            ["git", "status", "--porcelain"],
            capture_output=True, text=True, encoding="utf-8", errors="replace", timeout=10
        )
    except Exception:
        return []
    if proc.returncode != 0:
        return []
    collected = []
    for raw in proc.stdout.splitlines():
        if len(raw) < 4:
            continue
        # Drop the two status columns plus separator.
        entry = raw[3:].strip()
        if " -> " in entry:
            entry = entry.split(" -> ")[-1]
        entry = entry.strip().strip('"').replace("\\", "/")
        if entry:
            collected.append(entry)
    return collected
|
||||
|
||||
|
||||
def is_noise(f: str) -> bool:
    """True when *f* matches any configured noise pattern."""
    for rule in NOISE_PATTERNS:
        if rule.search(f):
            return True
    return False
|
||||
|
||||
|
||||
def safe_read_json(path):
    """Best-effort JSON load: {} for missing, unreadable, or invalid files."""
    data = {}
    if os.path.isfile(path):
        try:
            with open(path, "r", encoding="utf-8") as fh:
                data = json.load(fh)
        except Exception:
            data = {}
    return data
|
||||
|
||||
|
||||
def write_json(path, data):
    """Serialize *data* as pretty-printed UTF-8 JSON at *path*.

    The parent directory is created if needed; a bare filename falls back
    to ensuring the .kiro directory exists.
    """
    parent = os.path.dirname(path) or ".kiro"
    os.makedirs(parent, exist_ok=True)
    with open(path, "w", encoding="utf-8") as handle:
        json.dump(data, handle, indent=2, ensure_ascii=False)
|
||||
|
||||
|
||||
# ── Feature block 1: risk flagging (audit_flagger) ──
def do_audit_flag(all_files, now):
    """Classify changed paths against the risk rules and persist STATE_PATH.

    all_files -- dirty paths from `git status`; noise is filtered here
    now       -- aware datetime used for the marked_at timestamp
    """
    files = sorted(set(f for f in all_files if not is_noise(f)))

    # Clean tree: reset the state to "no audit required".
    if not files:
        write_json(STATE_PATH, {
            "audit_required": False,
            "db_docs_required": False,
            "reasons": [],
            "changed_files": [],
            "change_fingerprint": "",
            "marked_at": now.isoformat(),
            "last_reminded_at": None,
        })
        return

    reasons = []
    audit_required = False
    db_docs_required = False

    for f in files:
        # High-risk directory rules: tag each matching area once.
        for pattern, label in RISK_RULES:
            if pattern.search(f):
                audit_required = True
                tag = f"dir:{label}"
                if tag not in reasons:
                    reasons.append(tag)
        # Any file directly at the repository root is treated as high risk.
        if "/" not in f:
            audit_required = True
            if "root-file" not in reasons:
                reasons.append("root-file")
        # DB-related paths additionally require database documentation.
        if any(p.search(f) for p in DB_PATTERNS):
            db_docs_required = True
            if "db-schema-change" not in reasons:
                reasons.append("db-schema-change")

    # Fingerprint of the change set (path list only, not contents).
    fp = sha1hex("\n".join(files))

    # Preserve the existing last_reminded_at when the change set is unchanged,
    # so the reminder rate limit survives re-flagging.
    last_reminded = None
    existing = safe_read_json(STATE_PATH)
    if existing.get("change_fingerprint") == fp:
        last_reminded = existing.get("last_reminded_at")

    write_json(STATE_PATH, {
        "audit_required": audit_required,
        "db_docs_required": db_docs_required,
        "reasons": reasons,
        "changed_files": files[:50],
        "change_fingerprint": fp,
        "marked_at": now.isoformat(),
        "last_reminded_at": last_reminded,
    })
|
||||
|
||||
|
||||
# ── Feature block 2: prompt log ──
def do_prompt_log(now):
    """Write the raw user prompt to docs/audit/prompt_logs and record its ID.

    now -- aware datetime; determines both the prompt ID and the filename.
    Also persists {prompt_id, at} to PROMPT_ID_PATH for later correlation.
    """
    prompt_id = f"P{now.strftime('%Y%m%d-%H%M%S')}"
    prompt_raw = os.environ.get("USER_PROMPT", "")

    # NOTE(review): the threshold (20000) and the cut length (5000) disagree —
    # prompts between 5KB and 20KB are kept in full. Confirm this is intended.
    if len(prompt_raw) > 20000:
        prompt_raw = prompt_raw[:5000] + "\n[TRUNCATED: prompt too long]"

    # One-line summary: collapse all whitespace, cap at 120 characters.
    summary = " ".join(prompt_raw.split()).strip()
    if len(summary) > 120:
        summary = summary[:120] + "…"
    if not summary:
        summary = "(empty prompt)"

    log_dir = os.path.join("docs", "audit", "prompt_logs")
    os.makedirs(log_dir, exist_ok=True)
    filename = f"prompt_log_{now.strftime('%Y%m%d_%H%M%S')}.md"
    entry = f"""- [{prompt_id}] {now.strftime('%Y-%m-%d %H:%M:%S %z')}
- summary: {summary}
- prompt:
```text
{prompt_raw}
```
"""
    with open(os.path.join(log_dir, filename), "w", encoding="utf-8") as f:
        f.write(entry)

    # Persist the prompt ID so later hooks can correlate with this prompt.
    write_json(PROMPT_ID_PATH, {"prompt_id": prompt_id, "at": now.isoformat()})
|
||||
|
||||
|
||||
# ── Feature block 3: git snapshot (lets agentStop detect non-Kiro changes) ──
def do_git_snapshot(all_files, now):
    """Record the current dirty-file set to SNAPSHOT_PATH for later comparison."""
    ordered = sorted(all_files)
    fingerprint = sha1hex("\n".join(ordered)) if ordered else ""
    write_json(SNAPSHOT_PATH, {
        "files": ordered[:100],
        "fingerprint": fingerprint,
        "taken_at": now.isoformat(),
    })
|
||||
|
||||
|
||||
def main():
    """promptSubmit entry point: flag risk, log the prompt, snapshot git.

    Each feature block is isolated in its own try/except so a single
    failure cannot break the others.
    """
    # Exit immediately when not inside a git repository.
    try:
        r = subprocess.run(
            ["git", "rev-parse", "--is-inside-work-tree"],
            capture_output=True, text=True, encoding="utf-8", errors="replace", timeout=5
        )
        if r.returncode != 0:
            return
    except Exception:
        return

    now = now_taipei()
    all_files = get_changed_files()

    # Feature blocks run independently of each other.
    try:
        do_audit_flag(all_files, now)
    except Exception:
        pass

    try:
        do_prompt_log(now)
    except Exception:
        pass

    try:
        do_git_snapshot(all_files, now)
    except Exception:
        pass
|
||||
|
||||
|
||||
if __name__ == "__main__":
    # Never let this promptSubmit hook fail the editor pipeline.
    try:
        main()
    except Exception:
        pass
|
||||
134
.kiro/scripts/session_log.py
Normal file
134
.kiro/scripts/session_log.py
Normal file
@@ -0,0 +1,134 @@
|
||||
#!/usr/bin/env python3
|
||||
"""session_log — agentStop 时记录本次对话的完整日志。
|
||||
|
||||
收集来源:
|
||||
- 环境变量 AGENT_OUTPUT(Kiro 注入的 agent 输出)
|
||||
- 环境变量 USER_PROMPT(最近一次用户输入)
|
||||
- .kiro/.last_prompt_id.json(Prompt ID 溯源)
|
||||
- .kiro/.audit_state.json(变更文件列表)
|
||||
- git diff --stat(变更统计)
|
||||
|
||||
输出:docs/audit/session_logs/session_<timestamp>.md
|
||||
"""
|
||||
|
||||
import json
|
||||
import os
|
||||
import subprocess
|
||||
import sys
|
||||
from datetime import datetime, timezone, timedelta
|
||||
|
||||
TZ_TAIPEI = timezone(timedelta(hours=8))
|
||||
LOG_DIR = os.path.join("docs", "audit", "session_logs")
|
||||
STATE_PATH = os.path.join(".kiro", ".audit_state.json")
|
||||
PROMPT_ID_PATH = os.path.join(".kiro", ".last_prompt_id.json")
|
||||
|
||||
|
||||
def now_taipei():
    """Current time as an aware datetime in UTC+8 (TZ_TAIPEI)."""
    return datetime.now(TZ_TAIPEI)
|
||||
|
||||
|
||||
def safe_read_json(path):
    """Load JSON from *path*; swallow every error and return {} instead."""
    if not os.path.isfile(path):
        return {}
    try:
        with open(path, "r", encoding="utf-8") as stream:
            payload = json.load(stream)
    except Exception:
        return {}
    return payload
|
||||
|
||||
|
||||
def git_diff_stat():
    """`git diff --stat HEAD` output, or a placeholder string on failure."""
    try:
        proc = subprocess.run(
            ["git", "diff", "--stat", "HEAD"],
            capture_output=True, text=True, timeout=10
        )
    except Exception:
        return "(git not available)"
    if proc.returncode != 0:
        return "(git diff failed)"
    return proc.stdout.strip()
|
||||
|
||||
|
||||
def git_status_short():
    """`git status --short` output, or "" when git is unavailable or fails."""
    try:
        proc = subprocess.run(
            ["git", "status", "--short"],
            capture_output=True, text=True, timeout=10
        )
    except Exception:
        return ""
    return proc.stdout.strip() if proc.returncode == 0 else ""
|
||||
|
||||
|
||||
# Truncation limits keep a single session log from ballooning.
_MAX_OUTPUT_LEN = 50000  # agent output cap (~50KB)
_MAX_PROMPT_LEN = 10000  # user prompt cap (~10KB)


def _truncate(text, limit, notice):
    """Return `text` capped at `limit` characters, appending `notice` when cut."""
    if len(text) > limit:
        return text[:limit] + notice
    return text


def _collect_inputs():
    """Gather raw session data from the environment and .kiro state files.

    Returns (agent_output, user_prompt, prompt_id, audit_state); the two text
    fields are already truncated to their limits.
    """
    agent_output = _truncate(
        os.environ.get("AGENT_OUTPUT", ""),
        _MAX_OUTPUT_LEN,
        "\n\n[TRUNCATED: output exceeds 50KB]",
    )
    user_prompt = _truncate(
        os.environ.get("USER_PROMPT", ""),
        _MAX_PROMPT_LEN,
        "\n\n[TRUNCATED: prompt exceeds 10KB]",
    )
    prompt_info = safe_read_json(PROMPT_ID_PATH)
    audit_state = safe_read_json(STATE_PATH)
    prompt_id = prompt_info.get("prompt_id", "unknown")
    return agent_output, user_prompt, prompt_id, audit_state


def _render(timestamp_display, prompt_id, audit_state, user_prompt,
            agent_output, changed_files, diff_stat, status_short):
    """Render the markdown log body. Layout is kept byte-stable so any
    downstream tooling that parses these logs keeps working."""
    # Changed-files section lists at most the first 80 entries.
    return f"""# Session Log — {timestamp_display}

- Prompt-ID: `{prompt_id}`
- Audit Required: `{audit_state.get('audit_required', 'N/A')}`
- Reasons: {', '.join(audit_state.get('reasons', [])) or 'none'}

## User Input

```text
{user_prompt or '(not captured)'}
```

## Agent Output

```text
{agent_output or '(not captured)'}
```

## Changed Files ({len(changed_files)})

```
{chr(10).join(changed_files[:80]) if changed_files else '(none)'}
```

## Git Diff Stat

```
{diff_stat}
```

## Git Status

```
{status_short or '(clean)'}
```
"""


def main():
    """Write a markdown session log to docs/audit/session_logs/.

    Collects the agent output / user prompt from environment variables, the
    prompt-ID and audit state from .kiro state files, and git change stats,
    then renders them into session_<YYYYmmdd_HHMMSS>.md (Taipei time).
    """
    now = now_taipei()
    ts = now.strftime("%Y%m%d_%H%M%S")
    timestamp_display = now.strftime("%Y-%m-%d %H:%M:%S %z")

    agent_output, user_prompt, prompt_id, audit_state = _collect_inputs()

    diff_stat = git_diff_stat()
    status_short = git_status_short()
    changed_files = audit_state.get("changed_files", [])

    os.makedirs(LOG_DIR, exist_ok=True)
    filepath = os.path.join(LOG_DIR, f"session_{ts}.md")

    content = _render(
        timestamp_display, prompt_id, audit_state, user_prompt,
        agent_output, changed_files, diff_stat, status_short,
    )
    with open(filepath, "w", encoding="utf-8") as f:
        f.write(content)
|
||||
|
||||
|
||||
if __name__ == "__main__":
    try:
        main()
    except Exception:
        # Deliberate best-effort: this runs as an agentStop hook, and a
        # logging failure must never break the agent's stop sequence,
        # so all exceptions are swallowed silently.
        pass
|
||||
1
.kiro/specs/01-miniapp-db-foundation/.config.kiro
Normal file
1
.kiro/specs/01-miniapp-db-foundation/.config.kiro
Normal file
@@ -0,0 +1 @@
|
||||
{"generationMode": "requirements-first"}
|
||||
431
.kiro/specs/01-miniapp-db-foundation/design.md
Normal file
431
.kiro/specs/01-miniapp-db-foundation/design.md
Normal file
@@ -0,0 +1,431 @@
|
||||
# 设计文档:小程序数据库基础设施层(miniapp-db-foundation)
|
||||
|
||||
## 概述
|
||||
|
||||
本设计实现 P1 基础设施层的三大核心能力:
|
||||
|
||||
1. **业务库 Schema 划分**:在 `test_zqyy_app` 中创建 `auth`(认证)和 `biz`(业务)两个 Schema,配合权限管理
|
||||
2. **ETL 库 RLS 视图层**:在 `test_etl_feiqiu.app` Schema 中为 35 张 DWD/DWS 表创建行级安全视图,通过 `site_id` 隔离多门店数据
|
||||
3. **FDW 跨库映射**:通过 `postgres_fdw` 将 ETL 库的 RLS 视图映射为业务库的只读外部表
|
||||
|
||||
**环境变量驱动**:所有数据库名称通过 `.env` 环境变量引用,不硬编码。迁移脚本中使用占位符,验证脚本从 `PG_DSN` / `APP_DB_DSN` 解析连接信息。
|
||||
|
||||
| 环境变量 | 用途 | 示例值 |
|
||||
|---------|------|--------|
|
||||
| `PG_DSN` | ETL 库连接字符串 | `postgresql://user:pass@host:5432/test_etl_feiqiu` |
|
||||
| `APP_DB_DSN` | 业务库连接字符串 | `postgresql://user:pass@host:5432/test_zqyy_app` |
|
||||
|
||||
整体数据流向:
|
||||
|
||||
```
|
||||
ETL 库(PG_DSN) 业务库(APP_DB_DSN)
|
||||
┌─────────────────────┐ ┌─────────────────────┐
|
||||
│ dwd.dim_member │ │ auth (用户认证) │
|
||||
│ dwd.dim_assistant │ │ biz (业务数据) │
|
||||
│ dws.dws_* │ │ public (系统管理) │
|
||||
│ dws.cfg_* │ │ │
|
||||
│ │ │ │ fdw_etl │
|
||||
│ ▼ │ postgres_fdw │ ├ v_dim_member │
|
||||
│ app.v_dim_member │ ◄──────────────► │ ├ v_dim_assistant │
|
||||
│ app.v_dws_* │ IMPORT SCHEMA │ └ v_dws_* │
|
||||
│ (RLS: site_id 过滤) │ │ (外部表,只读) │
|
||||
└─────────────────────┘ └─────────────────────┘
|
||||
```
|
||||
|
||||
## 架构
|
||||
|
||||
### 分层架构
|
||||
|
||||
```mermaid
|
||||
graph TB
|
||||
subgraph "业务库(APP_DB_DSN)"
|
||||
AUTH["auth Schema<br/>用户认证、权限、映射"]
|
||||
BIZ["biz Schema<br/>业务数据"]
|
||||
PUBLIC["public Schema<br/>系统管理表(保留)"]
|
||||
FDW["fdw_etl Schema<br/>FDW 外部表(只读)"]
|
||||
end
|
||||
|
||||
subgraph "ETL 库(PG_DSN)"
|
||||
APP["app Schema<br/>RLS 视图层"]
|
||||
DWD["dwd Schema<br/>明细层(11 张表)"]
|
||||
DWS["dws Schema<br/>汇总层(24 张表)"]
|
||||
end
|
||||
|
||||
FDW -->|"postgres_fdw<br/>IMPORT FOREIGN SCHEMA"| APP
|
||||
APP -->|"WHERE site_id = current_setting(...)"| DWD
|
||||
APP -->|"WHERE site_id = current_setting(...)"| DWS
|
||||
```
|
||||
|
||||
### 执行顺序
|
||||
|
||||
迁移脚本必须按以下顺序执行:
|
||||
|
||||
1. **ETL 库**(通过 `PG_DSN` 连接):创建 `app` Schema → 创建 `app_reader` 角色 → 创建 RLS 视图 → 授权
|
||||
2. **业务库**(通过 `APP_DB_DSN` 连接):创建 `auth`/`biz` Schema → 安装 `postgres_fdw` → 创建外部服务器 → 用户映射 → 导入外部表 → 授权
|
||||
|
||||
## 组件与接口
|
||||
|
||||
### 组件 1:Schema 管理(业务库)
|
||||
|
||||
**职责**:在业务库(`APP_DB_DSN`)中创建 `auth` 和 `biz` Schema,配置权限。
|
||||
|
||||
**SQL 接口**:
|
||||
```sql
|
||||
-- 创建 Schema
|
||||
CREATE SCHEMA IF NOT EXISTS auth;
|
||||
CREATE SCHEMA IF NOT EXISTS biz;
|
||||
|
||||
-- 授权 app_user
|
||||
GRANT USAGE ON SCHEMA auth TO app_user;
|
||||
GRANT USAGE ON SCHEMA biz TO app_user;
|
||||
GRANT SELECT, INSERT, UPDATE, DELETE ON ALL TABLES IN SCHEMA auth TO app_user;
|
||||
GRANT SELECT, INSERT, UPDATE, DELETE ON ALL TABLES IN SCHEMA biz TO app_user;
|
||||
|
||||
-- 未来新表自动授权
|
||||
ALTER DEFAULT PRIVILEGES IN SCHEMA auth
|
||||
GRANT SELECT, INSERT, UPDATE, DELETE ON TABLES TO app_user;
|
||||
ALTER DEFAULT PRIVILEGES IN SCHEMA biz
|
||||
GRANT SELECT, INSERT, UPDATE, DELETE ON TABLES TO app_user;
|
||||
```
|
||||
|
||||
### 组件 2:RLS 视图层(ETL 库)
|
||||
|
||||
**职责**:在 ETL 库(`PG_DSN`)的 `app` Schema 中为每张源表创建带 `site_id` 过滤的视图。
|
||||
|
||||
**视图命名规则**:`app.v_<源表名>`,例如 `app.v_dim_member`、`app.v_dws_member_consumption_summary`。
|
||||
|
||||
**视图模板**:
|
||||
```sql
|
||||
CREATE OR REPLACE VIEW app.v_<源表名> AS
|
||||
SELECT * FROM <schema>.<源表名>
|
||||
WHERE site_id = current_setting('app.current_site_id')::bigint;
|
||||
```
|
||||
|
||||
**DWD 层视图清单(11 张)**:
|
||||
|
||||
| 视图名 | 源表 |
|
||||
|--------|------|
|
||||
| `app.v_dim_member` | `dwd.dim_member` |
|
||||
| `app.v_dim_assistant` | `dwd.dim_assistant` |
|
||||
| `app.v_dim_member_card_account` | `dwd.dim_member_card_account` |
|
||||
| `app.v_dim_table` | `dwd.dim_table` |
|
||||
| `app.v_dwd_settlement_head` | `dwd.dwd_settlement_head` |
|
||||
| `app.v_dwd_table_fee_log` | `dwd.dwd_table_fee_log` |
|
||||
| `app.v_dwd_assistant_service_log` | `dwd.dwd_assistant_service_log` |
|
||||
| `app.v_dwd_recharge_order` | `dwd.dwd_recharge_order` |
|
||||
| `app.v_dwd_store_goods_sale` | `dwd.dwd_store_goods_sale` |
|
||||
| `app.v_dim_staff` | `dwd.dim_staff` |
|
||||
| `app.v_dim_staff_ex` | `dwd.dim_staff_ex` |
|
||||
|
||||
**DWS 层视图清单(24 张)**:
|
||||
|
||||
| 视图名 | 源表 |
|
||||
|--------|------|
|
||||
| `app.v_dws_member_consumption_summary` | `dws.dws_member_consumption_summary` |
|
||||
| `app.v_dws_member_visit_detail` | `dws.dws_member_visit_detail` |
|
||||
| `app.v_dws_member_winback_index` | `dws.dws_member_winback_index` |
|
||||
| `app.v_dws_member_newconv_index` | `dws.dws_member_newconv_index` |
|
||||
| `app.v_dws_member_recall_index` | `dws.dws_member_recall_index` |
|
||||
| `app.v_dws_member_assistant_relation_index` | `dws.dws_member_assistant_relation_index` |
|
||||
| `app.v_dws_member_assistant_intimacy` | `dws.dws_member_assistant_intimacy` |
|
||||
| `app.v_dws_assistant_daily_detail` | `dws.dws_assistant_daily_detail` |
|
||||
| `app.v_dws_assistant_monthly_summary` | `dws.dws_assistant_monthly_summary` |
|
||||
| `app.v_dws_assistant_salary_calc` | `dws.dws_assistant_salary_calc` |
|
||||
| `app.v_dws_assistant_customer_stats` | `dws.dws_assistant_customer_stats` |
|
||||
| `app.v_dws_assistant_finance_analysis` | `dws.dws_assistant_finance_analysis` |
|
||||
| `app.v_dws_finance_daily_summary` | `dws.dws_finance_daily_summary` |
|
||||
| `app.v_dws_finance_income_structure` | `dws.dws_finance_income_structure` |
|
||||
| `app.v_dws_finance_recharge_summary` | `dws.dws_finance_recharge_summary` |
|
||||
| `app.v_dws_finance_discount_detail` | `dws.dws_finance_discount_detail` |
|
||||
| `app.v_dws_finance_expense_summary` | `dws.dws_finance_expense_summary` |
|
||||
| `app.v_dws_platform_settlement` | `dws.dws_platform_settlement` |
|
||||
| `app.v_dws_assistant_recharge_commission` | `dws.dws_assistant_recharge_commission` |
|
||||
| `app.v_cfg_performance_tier` | `dws.cfg_performance_tier` |
|
||||
| `app.v_cfg_assistant_level_price` | `dws.cfg_assistant_level_price` |
|
||||
| `app.v_cfg_bonus_rules` | `dws.cfg_bonus_rules` |
|
||||
| `app.v_cfg_index_parameters` | `dws.cfg_index_parameters` |
|
||||
| `app.v_dws_order_summary` | `dws.dws_order_summary` |
|
||||
|
||||
**P2 预留(注释标记,暂不创建)**:
|
||||
- `dws.dws_member_spending_power_index` → 待 P2 完成后补充
|
||||
- `dws.dws_assistant_order_contribution` → 待 P2 完成后补充
|
||||
|
||||
**`cfg_*` 表特殊处理**:配置表(`cfg_performance_tier`、`cfg_assistant_level_price`、`cfg_bonus_rules`、`cfg_index_parameters`)可能不含 `site_id` 列。对于不含 `site_id` 的配置表,视图直接 `SELECT *` 不加过滤条件。
|
||||
|
||||
**权限配置**:
|
||||
```sql
|
||||
-- 创建只读角色(如不存在)
|
||||
DO $$ BEGIN
|
||||
IF NOT EXISTS (SELECT FROM pg_roles WHERE rolname = 'app_reader') THEN
|
||||
CREATE ROLE app_reader LOGIN;
|
||||
END IF;
|
||||
END $$;
|
||||
|
||||
GRANT USAGE ON SCHEMA app TO app_reader;
|
||||
GRANT SELECT ON ALL TABLES IN SCHEMA app TO app_reader;
|
||||
ALTER DEFAULT PRIVILEGES IN SCHEMA app GRANT SELECT ON TABLES TO app_reader;
|
||||
```
|
||||
|
||||
### 组件 3:FDW 跨库映射(业务库)
|
||||
|
||||
**职责**:通过 `postgres_fdw` 将 ETL 库 `app` Schema 的视图映射为业务库 `fdw_etl` Schema 的外部表。
|
||||
|
||||
**实现方式**:使用 `IMPORT FOREIGN SCHEMA` 批量导入,而非逐表定义外部表。这与现有 `db/fdw/setup_fdw_test.sql` 的模式一致。
|
||||
|
||||
**环境感知**:迁移脚本中的 `host`、`dbname`、`port`、`password` 等连接参数使用占位符 `'***'`,部署时根据环境替换。项目已有 `db/fdw/setup_fdw_test.sql`(测试环境)和 `db/fdw/setup_fdw.sql`(生产环境)的分环境模式,本次迁移脚本遵循相同模式——提供测试环境和生产环境两个版本。
|
||||
|
||||
```sql
|
||||
-- 安装扩展
|
||||
CREATE EXTENSION IF NOT EXISTS postgres_fdw;
|
||||
|
||||
-- 创建外部服务器
|
||||
-- host / dbname / port 按环境替换,从 PG_DSN 解析 ETL 库连接信息
|
||||
CREATE SERVER IF NOT EXISTS etl_feiqiu_server
|
||||
FOREIGN DATA WRAPPER postgres_fdw
|
||||
OPTIONS (host '***', dbname '***', port '***');
|
||||
|
||||
-- 用户映射(密码按环境替换)
|
||||
CREATE USER MAPPING IF NOT EXISTS FOR app_user
|
||||
SERVER etl_feiqiu_server
|
||||
OPTIONS (user 'app_reader', password '***');
|
||||
|
||||
-- 创建目标 Schema
|
||||
CREATE SCHEMA IF NOT EXISTS fdw_etl;
|
||||
|
||||
-- 批量导入
|
||||
IMPORT FOREIGN SCHEMA app
|
||||
FROM SERVER etl_feiqiu_server
|
||||
INTO fdw_etl;
|
||||
|
||||
-- 授权
|
||||
GRANT USAGE ON SCHEMA fdw_etl TO app_user;
|
||||
GRANT SELECT ON ALL TABLES IN SCHEMA fdw_etl TO app_user;
|
||||
ALTER DEFAULT PRIVILEGES IN SCHEMA fdw_etl GRANT SELECT ON TABLES TO app_user;
|
||||
```
|
||||
|
||||
**设计决策**:
|
||||
1. 使用 `IMPORT FOREIGN SCHEMA` 而非逐表 `CREATE FOREIGN TABLE`——自动匹配列定义,避免手动维护列类型不一致的风险
|
||||
2. 新增 RLS 视图后只需重新执行 `IMPORT` 即可同步
|
||||
3. 与现有 `db/fdw/setup_fdw_test.sql` 保持一致
|
||||
4. 服务器名使用通用名 `etl_feiqiu_server`(不含环境前缀),通过连接参数区分环境
|
||||
|
||||
### 组件 4:验证脚本
|
||||
|
||||
**职责**:自动化检查所有数据库对象是否正确创建。
|
||||
|
||||
**文件位置**:`scripts/ops/validate_p1_db_foundation.py`
|
||||
|
||||
**接口**:
|
||||
```python
|
||||
def validate_p1_db_foundation() -> dict:
|
||||
"""
|
||||
返回验证结果字典:
|
||||
{
|
||||
"schemas": {"auth": bool, "biz": bool, "app": bool, "fdw_etl": bool},
|
||||
"rls_views": {"app.v_dim_member": bool, ...},
|
||||
"fdw_tables": {"fdw_etl.v_dim_member": bool, ...},
|
||||
"rls_filtering": bool,
|
||||
"permissions": {"app_user": bool, "app_reader": bool},
|
||||
"errors": [str, ...]
|
||||
}
|
||||
"""
|
||||
```
|
||||
|
||||
**环境变量依赖**(强制从 `.env` 加载,缺失时 `RuntimeError` 终止):
|
||||
- `PG_DSN`:ETL 库连接字符串(从中解析 host、port、dbname)
|
||||
- `APP_DB_DSN`:业务库连接字符串(从中解析 host、port、dbname)
|
||||
- 脚本通过 `load_dotenv()` 加载根 `.env`,禁止硬编码任何数据库名称或连接参数
|
||||
|
||||
## 数据模型
|
||||
|
||||
### Schema 拓扑
|
||||
|
||||
```mermaid
|
||||
erDiagram
|
||||
test_zqyy_app_auth {
|
||||
string schema_name "auth"
|
||||
string purpose "用户认证、权限、映射"
|
||||
}
|
||||
test_zqyy_app_biz {
|
||||
string schema_name "biz"
|
||||
string purpose "业务数据(任务、备注、AI、Excel)"
|
||||
}
|
||||
test_zqyy_app_fdw_etl {
|
||||
string schema_name "fdw_etl"
|
||||
string purpose "FDW 外部表(只读)"
|
||||
}
|
||||
test_zqyy_app_public {
|
||||
string schema_name "public"
|
||||
string purpose "系统管理表(保留)"
|
||||
}
|
||||
test_etl_feiqiu_app {
|
||||
string schema_name "app"
|
||||
string purpose "RLS 视图层"
|
||||
}
|
||||
test_etl_feiqiu_dwd {
|
||||
string schema_name "dwd"
|
||||
string purpose "明细层(11 张表)"
|
||||
}
|
||||
test_etl_feiqiu_dws {
|
||||
string schema_name "dws"
|
||||
string purpose "汇总层(24 张表)"
|
||||
}
|
||||
|
||||
test_etl_feiqiu_app ||--o{ test_etl_feiqiu_dwd : "视图引用"
|
||||
test_etl_feiqiu_app ||--o{ test_etl_feiqiu_dws : "视图引用"
|
||||
test_zqyy_app_fdw_etl ||--|| test_etl_feiqiu_app : "postgres_fdw 映射"
|
||||
```
|
||||
|
||||
### RLS 视图数据流
|
||||
|
||||
对于含 `site_id` 的表:
|
||||
```
|
||||
源表数据(全量)→ RLS 视图(site_id 过滤)→ FDW 外部表(只读访问)
|
||||
```
|
||||
|
||||
对于不含 `site_id` 的配置表(`cfg_*`):
|
||||
```
|
||||
源表数据(全量)→ 直通视图(无过滤)→ FDW 外部表(只读访问)
|
||||
```
|
||||
|
||||
### 迁移脚本清单
|
||||
|
||||
| 序号 | 目标库 | 文件名 | 内容 |
|
||||
|------|--------|--------|------|
|
||||
| 1 | ETL 库(`PG_DSN`) | `YYYY-MM-DD__p1_create_app_schema_rls_views.sql` | 创建 app Schema + 全部 RLS 视图 + app_reader 权限 |
|
||||
| 2 | 业务库(`APP_DB_DSN`) | `YYYY-MM-DD__p1_create_auth_biz_schemas.sql` | 创建 auth/biz Schema + app_user 权限 |
|
||||
| 3 | 业务库(`APP_DB_DSN`) | `YYYY-MM-DD__p1_setup_fdw_etl.sql` | FDW 扩展 + 外部服务器 + 用户映射 + 导入外部表 |
|
||||
|
||||
|
||||
## 正确性属性(Correctness Properties)
|
||||
|
||||
*属性是系统在所有有效执行中都应保持为真的特征或行为——本质上是关于系统应该做什么的形式化陈述。属性是人类可读规格与机器可验证正确性保证之间的桥梁。*
|
||||
|
||||
### Property 1:默认权限自动授予
|
||||
|
||||
*For any* 在 `auth` 或 `biz` Schema 中新创建的表,`app_user` 角色都应自动获得 SELECT、INSERT、UPDATE、DELETE 权限,无需额外手动授权。
|
||||
|
||||
**Validates: Requirements 1.5**
|
||||
|
||||
### Property 2:public Schema 不变量
|
||||
|
||||
*For any* 迁移脚本执行前后,`test_zqyy_app.public` Schema 中的表集合应保持不变——迁移不应删除、重命名或修改 `public` 中的现有表。
|
||||
|
||||
**Validates: Requirements 1.6**
|
||||
|
||||
### Property 3:RLS 视图定义包含 site_id 过滤
|
||||
|
||||
*For any* `test_etl_feiqiu.app` Schema 中的 RLS 视图(含 `site_id` 列的源表对应的视图),其视图定义 SQL 中都应包含 `current_setting('app.current_site_id')` 过滤条件。
|
||||
|
||||
**Validates: Requirements 2.4**
|
||||
|
||||
### Property 4:RLS 过滤正确性
|
||||
|
||||
*For any* 含 `site_id` 列的 RLS 视图和任意有效的 `site_id` 值,设置 `app.current_site_id` 后查询该视图,返回结果中所有行的 `site_id` 都应等于设置的值。
|
||||
|
||||
**Validates: Requirements 2.5**
|
||||
|
||||
### Property 5:未设置 site_id 时 RLS 视图拒绝访问
|
||||
|
||||
*For any* 含 `site_id` 过滤的 RLS 视图,在未设置 `app.current_site_id` 的会话中执行查询,应抛出错误而非返回数据。
|
||||
|
||||
**Validates: Requirements 2.6**
|
||||
|
||||
### Property 6:FDW 外部表完整性与数据一致性
|
||||
|
||||
*For any* `test_etl_feiqiu.app` Schema 中的视图,`test_zqyy_app.fdw_etl` Schema 中都应存在对应的可查询外部表,且在相同 `site_id` 条件下,外部表返回的数据与 RLS 视图返回的数据一致。
|
||||
|
||||
**Validates: Requirements 3.4, 3.5, 3.6**
|
||||
|
||||
### Property 7:迁移脚本结构合规性
|
||||
|
||||
*For any* `db/etl_feiqiu/migrations/` 或 `db/zqyy_app/migrations/` 中本次新增的迁移脚本文件,文件名应匹配 `YYYY-MM-DD__*.sql` 模式,且文件内容中应包含回滚语句(以注释形式)。
|
||||
|
||||
**Validates: Requirements 4.3, 4.4**
|
||||
|
||||
### Property 8:迁移脚本幂等性
|
||||
|
||||
*For any* 本次新增的迁移脚本,连续执行两次的结果应与执行一次相同——第二次执行不应产生错误。
|
||||
|
||||
**Validates: Requirements 4.5**
|
||||
|
||||
### Property 9:环境变量缺失时验证脚本报错
|
||||
|
||||
*For any* 必需环境变量(`PG_DSN`、`APP_DB_DSN`)的缺失组合,验证脚本应立即抛出错误终止,而非静默使用默认值或空字符串。
|
||||
|
||||
**Validates: Requirements 5.8**
|
||||
|
||||
## 错误处理
|
||||
|
||||
### 迁移脚本错误处理
|
||||
|
||||
| 场景 | 处理方式 |
|
||||
|------|---------|
|
||||
| Schema 已存在 | `CREATE SCHEMA IF NOT EXISTS` 幂等跳过 |
|
||||
| 视图已存在 | `CREATE OR REPLACE VIEW` 覆盖更新 |
|
||||
| 角色不存在 | `DO $$ ... IF NOT EXISTS ... END $$` 条件创建 |
|
||||
| 源表不存在(P2 待建表) | 以注释形式预留,不创建视图 |
|
||||
| FDW 服务器已存在 | `CREATE SERVER IF NOT EXISTS` 幂等跳过 |
|
||||
| 用户映射已存在 | `CREATE USER MAPPING IF NOT EXISTS` 幂等跳过 |
|
||||
| `IMPORT FOREIGN SCHEMA` 表已存在 | 先 `DROP SCHEMA fdw_etl CASCADE` 再重新导入(脚本中提供选项) |
|
||||
|
||||
### 验证脚本错误处理
|
||||
|
||||
| 场景 | 处理方式 |
|
||||
|------|---------|
|
||||
| 环境变量缺失 | `RuntimeError` 立即终止,输出缺失变量名 |
|
||||
| 数据库连接失败 | 捕获 `psycopg2.OperationalError`,输出连接参数(脱敏)和错误信息 |
|
||||
| Schema/视图/外部表不存在 | 记录为失败项,继续检查其余项目 |
|
||||
| RLS 过滤验证无数据 | 标记为 SKIP(无法验证),不标记为失败 |
|
||||
| 权限查询失败 | 记录具体错误,继续检查 |
|
||||
|
||||
### `current_setting` 未设置时的行为
|
||||
|
||||
PostgreSQL 中 `current_setting('app.current_site_id')` 在未设置时会抛出 `ERROR: unrecognized configuration parameter "app.current_site_id"`。这是期望行为(需求 2.6),确保不会意外返回全量数据。
|
||||
|
||||
如果需要更友好的错误信息,可以使用 `current_setting('app.current_site_id', true)` 返回 NULL,然后在视图中用 `CASE` 处理。但当前设计选择让 PostgreSQL 原生报错,因为:
|
||||
1. 更安全——不可能绕过
|
||||
2. 后端代码必须显式设置 `SET app.current_site_id = ...`,这是一个强制约束
|
||||
|
||||
## 测试策略
|
||||
|
||||
### 属性测试(Property-Based Testing)
|
||||
|
||||
使用 Python `hypothesis` 框架,测试目录:`tests/`(Monorepo 级属性测试目录)。
|
||||
|
||||
每个属性测试至少运行 100 次迭代。每个测试用注释标注对应的设计属性编号。
|
||||
|
||||
标注格式:`# Feature: miniapp-db-foundation, Property N: <属性标题>`
|
||||
|
||||
**属性测试清单**:
|
||||
|
||||
| 属性 | 测试方法 | 生成器 |
|
||||
|------|---------|--------|
|
||||
| P1 默认权限 | 生成随机表名,在 auth/biz 中创建表,验证 app_user 权限 | `hypothesis.strategies.text` 生成合法 SQL 标识符 |
|
||||
| P3 视图定义过滤 | 遍历所有 app schema 视图,检查定义 SQL | 无需生成器,遍历所有视图 |
|
||||
| P4 RLS 过滤正确性 | 生成随机 site_id,设置后查询视图,验证结果 | `hypothesis.strategies.integers` 生成 site_id |
|
||||
| P5 未设置 site_id 报错 | 遍历所有 RLS 视图,在新会话中查询 | 无需生成器,遍历所有视图 |
|
||||
| P7 脚本结构合规 | 遍历所有新增迁移脚本,验证命名和内容 | 无需生成器,遍历文件 |
|
||||
| P8 幂等性 | 对每个迁移脚本执行两次,验证无错误 | 无需生成器 |
|
||||
| P9 环境变量缺失 | 生成环境变量缺失组合,验证报错 | `hypothesis.strategies.sampled_from` 生成缺失组合 |
|
||||
|
||||
**注意**:P2(public schema 不变量)和 P6(FDW 数据一致性)需要真实数据库环境,作为集成测试在验证脚本中实现,而非 hypothesis 属性测试。
|
||||
|
||||
### 单元测试
|
||||
|
||||
单元测试聚焦于验证脚本的逻辑正确性:
|
||||
|
||||
- 验证脚本在 Schema 缺失时正确报告失败
|
||||
- 验证脚本在权限不足时正确报告
|
||||
- 验证脚本的输出格式正确(JSON 结构)
|
||||
- 环境变量缺失时的错误消息包含变量名
|
||||
|
||||
### 集成测试
|
||||
|
||||
集成测试通过验证脚本 `scripts/ops/validate_p1_db_foundation.py` 实现,覆盖:
|
||||
|
||||
- 全部 Schema 存在性检查
|
||||
- 全部 RLS 视图存在性和过滤正确性
|
||||
- 全部 FDW 外部表存在性和可查询性
|
||||
- 权限配置完整性
|
||||
- FDW 数据与 RLS 视图数据一致性
|
||||
88
.kiro/specs/01-miniapp-db-foundation/requirements.md
Normal file
88
.kiro/specs/01-miniapp-db-foundation/requirements.md
Normal file
@@ -0,0 +1,88 @@
|
||||
# 需求文档:小程序数据库基础设施层(miniapp-db-foundation)
|
||||
|
||||
## 简介
|
||||
|
||||
P1 基础设施层是整个小程序系统的第一个 SPEC,无前置依赖,是所有后续 SPEC 的硬依赖。本 SPEC 负责在业务库 `test_zqyy_app` 中建立清晰的 Schema 划分(`auth` + `biz`),在 ETL 库 `test_etl_feiqiu` 中为数据依赖矩阵列出的所有 DWD/DWS 表创建 RLS 视图(按 `site_id` 隔离),并通过 `postgres_fdw` 将 RLS 视图映射为业务库的外部表,使后端无需直连 ETL 库即可读取汇总/维度数据。
|
||||
|
||||
## 术语表
|
||||
|
||||
- **Schema_Manager**:负责在 PostgreSQL 数据库中创建和管理 Schema、权限配置的迁移脚本系统
|
||||
- **RLS_View_Layer**:在 `test_etl_feiqiu.app` Schema 中创建的一组视图,通过 `current_setting('app.current_site_id')` 按 `site_id` 过滤数据,实现行级安全隔离
|
||||
- **FDW_Bridge**:通过 `postgres_fdw` 扩展在 `test_zqyy_app.fdw_etl` Schema 中创建的外部表集合,只读映射 ETL 库 `app` Schema 的 RLS 视图
|
||||
- **Migration_Script**:存放在 `db/zqyy_app/migrations/` 或 `db/etl_feiqiu/migrations/` 中的纯 SQL 迁移脚本,以日期前缀命名
|
||||
- **Validation_Script**:用于验证数据库对象是否正确创建、权限是否配置正确、数据是否可查询的 Python 脚本
|
||||
- **site_id**:门店标识符,类型为 `BIGINT`,用于多门店数据隔离
|
||||
- **app_reader**:ETL 库侧的只读角色,供 FDW 用户映射使用
|
||||
- **app_user**:业务库侧的应用连接角色,通过 FDW 读取 ETL 数据
|
||||
|
||||
## 需求
|
||||
|
||||
### 需求 1:业务库 Schema 划分与权限配置
|
||||
|
||||
**用户故事:** 作为后端开发者,我需要 `test_zqyy_app` 中有清晰的 Schema 划分(`auth` + `biz`),以便按功能组织业务表。
|
||||
|
||||
#### 验收标准
|
||||
|
||||
1. WHEN Migration_Script 执行完成, THE Schema_Manager SHALL 在 `test_zqyy_app` 中创建 `auth` Schema
|
||||
2. WHEN Migration_Script 执行完成, THE Schema_Manager SHALL 在 `test_zqyy_app` 中创建 `biz` Schema
|
||||
3. WHEN `auth` Schema 创建完成, THE Schema_Manager SHALL 授予 `app_user` 角色对 `auth` Schema 的 USAGE 权限和对其中所有表的 SELECT、INSERT、UPDATE、DELETE 权限
|
||||
4. WHEN `biz` Schema 创建完成, THE Schema_Manager SHALL 授予 `app_user` 角色对 `biz` Schema 的 USAGE 权限和对其中所有表的 SELECT、INSERT、UPDATE、DELETE 权限
|
||||
5. WHEN 新表在 `auth` 或 `biz` Schema 中创建, THE Schema_Manager SHALL 通过 ALTER DEFAULT PRIVILEGES 自动授予 `app_user` 角色相应权限
|
||||
6. THE Migration_Script SHALL 保留 `public` Schema 中现有的系统管理表(`admin_users`、`roles`、`permissions` 等)不受影响
|
||||
|
||||
### 需求 2:ETL 库 RLS 视图层创建
|
||||
|
||||
**用户故事:** 作为系统管理员,我需要 RLS 视图按 `site_id` 隔离数据,以便多门店数据安全。
|
||||
|
||||
#### 验收标准
|
||||
|
||||
1. WHEN Migration_Script 执行完成, THE Schema_Manager SHALL 在 `test_etl_feiqiu` 中创建 `app` Schema(如不存在)
|
||||
2. WHEN `app` Schema 创建完成, THE RLS_View_Layer SHALL 为数据依赖矩阵中列出的每张 DWD 表创建对应的 RLS 视图(共 11 张:`dim_member`、`dim_assistant`、`dim_member_card_account`、`dim_table`、`dwd_settlement_head`、`dwd_table_fee_log`、`dwd_assistant_service_log`、`dwd_recharge_order`、`dwd_store_goods_sale`、`dim_staff`、`dim_staff_ex`)
|
||||
3. WHEN `app` Schema 创建完成, THE RLS_View_Layer SHALL 为数据依赖矩阵中列出的每张 DWS 表创建对应的 RLS 视图(共 24 张,包含 `dws_*` 和 `cfg_*` 表)
|
||||
4. THE RLS_View_Layer 中每个视图 SHALL 包含 `WHERE site_id = current_setting('app.current_site_id')::bigint` 过滤条件
|
||||
5. WHEN 设置 `app.current_site_id` 为某门店 ID 后查询 RLS 视图, THE RLS_View_Layer SHALL 仅返回该门店的数据
|
||||
6. WHEN 未设置 `app.current_site_id` 时查询 RLS 视图, THE RLS_View_Layer SHALL 抛出错误而非返回全部数据
|
||||
7. THE Schema_Manager SHALL 授予 `app_reader` 角色对 `app` Schema 的 USAGE 权限和对其中所有视图的 SELECT 权限
|
||||
8. THE RLS_View_Layer SHALL 为 P2 待建表(`dws_member_spending_power_index`、`dws_assistant_order_contribution`)在迁移脚本中以注释形式预留位置
|
||||
|
||||
### 需求 3:FDW 外部表映射
|
||||
|
||||
**用户故事:** 作为后端开发者,我需要通过 FDW 从 `test_zqyy_app` 读取 ETL 库的 DWS/DWD 数据,以便小程序页面展示 ETL 计算结果。
|
||||
|
||||
#### 验收标准
|
||||
|
||||
1. WHEN Migration_Script 执行完成, THE FDW_Bridge SHALL 在 `test_zqyy_app` 中安装 `postgres_fdw` 扩展
|
||||
2. WHEN `postgres_fdw` 扩展安装完成, THE FDW_Bridge SHALL 创建指向 `test_etl_feiqiu` 的外部服务器 `etl_feiqiu_server`(服务器名不含环境前缀,通过连接参数区分环境,与设计文档及迁移脚本保持一致)
|
||||
3. WHEN 外部服务器创建完成, THE FDW_Bridge SHALL 创建 `app_user` 到 `app_reader` 的用户映射
|
||||
4. WHEN 用户映射创建完成, THE FDW_Bridge SHALL 在 `fdw_etl` Schema 中通过 `IMPORT FOREIGN SCHEMA app` 导入所有外部表
|
||||
5. WHEN 外部表导入完成, THE FDW_Bridge SHALL 对 `fdw_etl` Schema 中的每张外部表执行 `SELECT` 查询验证可读性
|
||||
6. WHEN 外部表查询成功, THE FDW_Bridge 返回的数据 SHALL 与 ETL 库 `app` Schema 中对应 RLS 视图的数据一致
|
||||
7. THE FDW_Bridge SHALL 授予 `app_user` 角色对 `fdw_etl` Schema 的 USAGE 权限和对其中所有外部表的 SELECT 权限
|
||||
|
||||
### 需求 4:迁移脚本管理
|
||||
|
||||
**用户故事:** 作为后端开发者,我需要所有数据库变更都有对应的迁移脚本,以便变更可追溯、可重放、可回滚。
|
||||
|
||||
#### 验收标准
|
||||
|
||||
1. THE Migration_Script SHALL 将 ETL 库变更(RLS 视图创建、`app` Schema 权限)存放在 `db/etl_feiqiu/migrations/` 目录中
|
||||
2. THE Migration_Script SHALL 将业务库变更(Schema 创建、FDW 配置)存放在 `db/zqyy_app/migrations/` 目录中
|
||||
3. THE Migration_Script SHALL 使用日期前缀命名(格式:`YYYY-MM-DD__<描述>.sql`)
|
||||
4. THE Migration_Script SHALL 在每个脚本中包含回滚语句(以注释形式)
|
||||
5. THE Migration_Script SHALL 使用 `IF NOT EXISTS` / `OR REPLACE` 等幂等语法,确保重复执行不会报错
|
||||
6. THE Migration_Script SHALL 使用 UTF-8 编码,纯 SQL(非 ORM)
|
||||
|
||||
### 需求 5:端到端验证
|
||||
|
||||
**用户故事:** 作为后端开发者,我需要一个自动化验证脚本,确认所有数据库对象正确创建且数据可访问。
|
||||
|
||||
#### 验收标准
|
||||
|
||||
1. WHEN Validation_Script 执行时, THE Validation_Script SHALL 检查 `test_zqyy_app` 中 `auth` 和 `biz` Schema 是否存在
|
||||
2. WHEN Validation_Script 执行时, THE Validation_Script SHALL 检查 `test_etl_feiqiu.app` Schema 中所有 RLS 视图是否存在
|
||||
3. WHEN Validation_Script 执行时, THE Validation_Script SHALL 检查 `test_zqyy_app.fdw_etl` 中所有外部表是否存在
|
||||
4. WHEN Validation_Script 执行时, THE Validation_Script SHALL 对每张外部表执行 `SELECT count(*)` 验证可查询性
|
||||
5. WHEN Validation_Script 执行时, THE Validation_Script SHALL 设置 `app.current_site_id` 后验证 RLS 视图正确过滤数据
|
||||
6. WHEN Validation_Script 执行时, THE Validation_Script SHALL 验证 `app_user` 和 `app_reader` 角色的权限配置正确
|
||||
7. WHEN 验证发现异常, THE Validation_Script SHALL 输出具体的失败项和错误信息
|
||||
8. THE Validation_Script SHALL 从 `.env` 加载数据库连接参数(`PG_DSN`、`APP_DB_DSN`),缺失时立即报错终止
|
||||
108
.kiro/specs/01-miniapp-db-foundation/tasks.md
Normal file
108
.kiro/specs/01-miniapp-db-foundation/tasks.md
Normal file
@@ -0,0 +1,108 @@
|
||||
# 实现计划:小程序数据库基础设施层(miniapp-db-foundation)
|
||||
|
||||
## 概述
|
||||
|
||||
按照"ETL 库先行 → 业务库跟进 → FDW 桥接 → 验证收尾"的顺序,将设计拆分为可增量执行的编码任务。每个迁移脚本使用幂等语法,所有连接参数通过环境变量驱动。
|
||||
|
||||
## 任务
|
||||
|
||||
- [x] 1. ETL 库:创建 app Schema 与 RLS 视图
|
||||
- [x] 1.1 编写迁移脚本 `db/etl_feiqiu/migrations/YYYY-MM-DD__p1_create_app_schema_rls_views.sql`
|
||||
- 创建 `app` Schema(`CREATE SCHEMA IF NOT EXISTS app`)
|
||||
- 创建 `app_reader` 角色(条件创建,`DO $$ ... IF NOT EXISTS ... END $$`)
|
||||
- 为 11 张 DWD 表创建 RLS 视图(`CREATE OR REPLACE VIEW app.v_<表名> AS SELECT * FROM dwd.<表名> WHERE site_id = current_setting('app.current_site_id')::bigint`)
|
||||
- 为 24 张 DWS 表创建 RLS 视图(同上模式;`cfg_*` 配置表若无 `site_id` 列则直接 `SELECT *` 不加过滤)
|
||||
- 在脚本末尾以注释形式预留 P2 待建表位置(`dws_member_spending_power_index`、`dws_assistant_order_contribution`)
|
||||
- 授予 `app_reader` 对 `app` Schema 的 USAGE + SELECT 权限 + ALTER DEFAULT PRIVILEGES
|
||||
- 包含回滚语句(注释形式)
|
||||
- _Requirements: 2.1, 2.2, 2.3, 2.4, 2.7, 2.8, 4.1, 4.3, 4.4, 4.5, 4.6_
|
||||
|
||||
- [x] 1.2 编写属性测试:RLS 视图定义包含 site_id 过滤
|
||||
- **Property 3: RLS 视图定义包含 site_id 过滤**
|
||||
- 遍历 `app` Schema 所有视图,查询 `pg_views.definition`,验证含 `site_id` 列的源表对应视图包含 `current_setting` 过滤条件
|
||||
- **Validates: Requirements 2.4**
|
||||
|
||||
- [x] 1.3 编写属性测试:未设置 site_id 时 RLS 视图拒绝访问
|
||||
- **Property 5: 未设置 site_id 时 RLS 视图拒绝访问**
|
||||
- 遍历所有含 `site_id` 过滤的 RLS 视图,在新会话(未设置 `app.current_site_id`)中执行 `SELECT`,验证抛出错误
|
||||
- **Validates: Requirements 2.6**
|
||||
|
||||
- [x] 2. 业务库:创建 auth/biz Schema 与权限
|
||||
- [x] 2.1 编写迁移脚本 `db/zqyy_app/migrations/YYYY-MM-DD__p1_create_auth_biz_schemas.sql`
|
||||
- 创建 `auth` Schema(`CREATE SCHEMA IF NOT EXISTS auth`)
|
||||
- 创建 `biz` Schema(`CREATE SCHEMA IF NOT EXISTS biz`)
|
||||
- 授予 `app_user` 对 `auth`/`biz` 的 USAGE + CRUD 权限
|
||||
- 设置 ALTER DEFAULT PRIVILEGES 自动授权未来新表
|
||||
- 包含回滚语句(注释形式)
|
||||
- _Requirements: 1.1, 1.2, 1.3, 1.4, 1.5, 1.6, 4.2, 4.3, 4.4, 4.5, 4.6_
|
||||
|
||||
- [x] 2.2 编写属性测试:默认权限自动授予
|
||||
- **Property 1: 默认权限自动授予**
|
||||
- 使用 hypothesis 生成随机合法表名,在 `auth`/`biz` 中创建临时表,验证 `app_user` 自动获得 SELECT/INSERT/UPDATE/DELETE 权限,测试后清理
|
||||
- **Validates: Requirements 1.5**
|
||||
|
||||
- [x] 3. 业务库:配置 FDW 跨库映射
|
||||
- [x] 3.1 编写迁移脚本 `db/zqyy_app/migrations/YYYY-MM-DD__p1_setup_fdw_etl.sql`
|
||||
- 安装 `postgres_fdw` 扩展
|
||||
- 创建外部服务器 `etl_feiqiu_server`(host/dbname/port 使用占位符 `'***'`,按环境替换)
|
||||
- 创建 `app_user` → `app_reader` 用户映射
|
||||
- 创建 `fdw_etl` Schema
|
||||
- 执行 `IMPORT FOREIGN SCHEMA app FROM SERVER etl_feiqiu_server INTO fdw_etl`
|
||||
- 授予 `app_user` 对 `fdw_etl` 的 USAGE + SELECT 权限 + ALTER DEFAULT PRIVILEGES
|
||||
- 包含回滚语句(注释形式)
|
||||
- _Requirements: 3.1, 3.2, 3.3, 3.4, 3.7, 4.2, 4.3, 4.4, 4.5, 4.6_
|
||||
|
||||
- [x] 3.2 编写属性测试:FDW 外部表完整性与数据一致性
|
||||
- **Property 6: FDW 外部表完整性与数据一致性**
|
||||
- 遍历 ETL 库 `app` Schema 所有视图,验证 `fdw_etl` 中存在对应外部表且可查询;在相同 `site_id` 下对比数据一致性
|
||||
- **Validates: Requirements 3.4, 3.5, 3.6**
|
||||
|
||||
- [x] 4. Checkpoint — 迁移脚本验证
|
||||
- 确保三个迁移脚本均可在测试环境中成功执行(按顺序:ETL 库 → 业务库 Schema → 业务库 FDW)
|
||||
- 验证重复执行不报错(幂等性)
|
||||
- 如有问题请告知用户
|
||||
|
||||
- [x] 5. 编写端到端验证脚本
|
||||
- [x] 5.1 创建 `scripts/ops/validate_p1_db_foundation.py`
|
||||
- 通过 `load_dotenv()` 加载根 `.env`,读取 `PG_DSN` 和 `APP_DB_DSN`,缺失时 `RuntimeError` 终止
|
||||
- 检查业务库中 `auth`、`biz` Schema 存在性
|
||||
- 检查 ETL 库中 `app` Schema 及所有 RLS 视图存在性(对照设计文档中的 35 张视图清单)
|
||||
- 检查业务库中 `fdw_etl` Schema 及所有外部表存在性
|
||||
- 对每张外部表执行 `SELECT count(*)` 验证可查询性
|
||||
- 设置 `app.current_site_id` 后验证 RLS 视图过滤正确性
|
||||
- 验证 `app_user` 和 `app_reader` 角色权限配置
|
||||
- 输出结构化验证结果(通过/失败/跳过),失败项附带错误信息
|
||||
- _Requirements: 5.1, 5.2, 5.3, 5.4, 5.5, 5.6, 5.7, 5.8_
|
||||
|
||||
- [x] 5.2 编写属性测试:RLS 过滤正确性
|
||||
- **Property 4: RLS 过滤正确性**
|
||||
- 使用 hypothesis 生成随机 `site_id`,设置 `app.current_site_id` 后查询含 `site_id` 的 RLS 视图,验证返回结果中所有行的 `site_id` 等于设置值
|
||||
- **Validates: Requirements 2.5**
|
||||
|
||||
- [x] 5.3 编写属性测试:环境变量缺失报错
|
||||
- **Property 9: 环境变量缺失时验证脚本报错**
|
||||
- 使用 hypothesis 生成 `PG_DSN`/`APP_DB_DSN` 的缺失组合,验证脚本抛出 `RuntimeError`
|
||||
- **Validates: Requirements 5.8**
|
||||
|
||||
- [x] 5.4 编写属性测试:迁移脚本结构合规性
|
||||
- **Property 7: 迁移脚本结构合规性**
|
||||
- 遍历本次新增的迁移脚本文件,验证文件名匹配 `YYYY-MM-DD__*.sql` 模式,且内容包含回滚语句注释
|
||||
- **Validates: Requirements 4.3, 4.4**
|
||||
|
||||
- [x] 5.5 编写属性测试:迁移脚本幂等性
|
||||
- **Property 8: 迁移脚本幂等性**
|
||||
- 对每个迁移脚本连续执行两次,验证第二次执行无错误
|
||||
- **Validates: Requirements 4.5**
|
||||
|
||||
- [x] 6. Final checkpoint — 全量验证
|
||||
- 运行验证脚本 `python scripts/ops/validate_p1_db_foundation.py`,确认所有检查项通过
|
||||
- 运行属性测试 `cd C:\NeoZQYY && pytest tests/ -v -k p1`,确认所有属性测试通过
|
||||
- 如有问题请告知用户
|
||||
|
||||
## 说明
|
||||
|
||||
- 标记 `*` 的子任务为可选,可跳过以加速 MVP(本计划当前未包含此类可选子任务)
|
||||
- 每个任务引用具体的需求编号,确保可追溯
|
||||
- Checkpoint 确保增量验证
|
||||
- 属性测试使用 Python hypothesis 框架,测试文件放在根目录 `tests/` 下
|
||||
- 迁移脚本中的数据库连接参数(host/dbname/port/password)均使用占位符,按环境替换
|
||||
1
.kiro/specs/02-etl-dws-miniapp-extensions/.config.kiro
Normal file
1
.kiro/specs/02-etl-dws-miniapp-extensions/.config.kiro
Normal file
@@ -0,0 +1 @@
|
||||
{"generationMode": "requirements-first"}
|
||||
562
.kiro/specs/02-etl-dws-miniapp-extensions/design.md
Normal file
562
.kiro/specs/02-etl-dws-miniapp-extensions/design.md
Normal file
@@ -0,0 +1,562 @@
|
||||
# 设计文档:ETL DWS 层扩展 — 小程序数据支撑
|
||||
|
||||
## 概述
|
||||
|
||||
本设计覆盖三个独立但相关的 DWS 层扩展模块:
|
||||
|
||||
1. **助教订单流水四项统计**:新建 `AssistantOrderContributionTask`,计算每名助教每日的订单总流水、订单净流水、时效贡献流水、时效净贡献。算法核心在于"时效贡献流水"的台费分摊和酒水食品均分逻辑。
|
||||
2. **会员消费汇总扩展**:修改现有 `MemberConsumptionTask`,新增 30/60/90 天充值窗口统计和次均消费字段。
|
||||
3. **定档折算惩罚**:修改现有 `AssistantDailyTask`,新增时间重叠检测和惩罚计算逻辑。
|
||||
|
||||
三个模块共享同一套 RLS 视图 + FDW 映射基础设施。
|
||||
|
||||
### 设计决策
|
||||
|
||||
1. **助教订单流水独立建表**:四项统计粒度为 `(site_id, assistant_id, stat_date)`,与现有 `dws_assistant_daily_detail` 粒度相同但语义不同(daily_detail 聚焦服务时长/金额,contribution 聚焦订单级流水分摊),独立建表避免字段膨胀。
|
||||
2. **时效贡献流水计算为纯函数**:核心分摊算法(`compute_time_weighted_revenue`)设计为静态方法,输入为结构化的订单数据,输出为每名助教的贡献值。不依赖数据库,便于属性测试。
|
||||
3. **惩罚检测在 transform 阶段完成**:定档折算惩罚的时间重叠检测和计算在 `AssistantDailyTask.transform` 中完成,不新建独立任务,因为惩罚字段与日度明细同粒度。
|
||||
4. **充值统计复用现有 extract 模式**:在 `MemberConsumptionTask` 中新增一个 `_extract_recharge_stats` 方法,与现有的 `_extract_consumption_stats` 并行提取,在 transform 阶段合并。
|
||||
|
||||
## 架构
|
||||
|
||||
```mermaid
|
||||
graph TD
|
||||
subgraph 数据来源(DWD)
|
||||
SH[dwd_settlement_head<br/>结算主表]
|
||||
TF[dwd_table_fee_log<br/>台费明细]
|
||||
ASL[dwd_assistant_service_log<br/>助教服务记录]
|
||||
RO[dwd_recharge_order<br/>充值订单]
|
||||
end
|
||||
|
||||
subgraph 新建任务
|
||||
AOC[AssistantOrderContributionTask<br/>助教订单流水统计]
|
||||
end
|
||||
|
||||
subgraph 修改任务
|
||||
MCT[MemberConsumptionTask<br/>+充值窗口 +次均消费]
|
||||
ADT[AssistantDailyTask<br/>+惩罚检测 +惩罚计算]
|
||||
end
|
||||
|
||||
subgraph 输出(DWS)
|
||||
T1[dws_assistant_order_contribution<br/>新建]
|
||||
T2[dws_member_consumption_summary<br/>扩展字段]
|
||||
T3[dws_assistant_daily_detail<br/>扩展字段]
|
||||
end
|
||||
|
||||
subgraph 基础设施
|
||||
RLS[app schema RLS 视图]
|
||||
FDW[fdw_etl 外部表映射]
|
||||
end
|
||||
|
||||
SH --> AOC
|
||||
TF --> AOC
|
||||
ASL --> AOC
|
||||
AOC --> T1
|
||||
|
||||
RO --> MCT
|
||||
SH --> MCT
|
||||
MCT --> T2
|
||||
|
||||
ASL --> ADT
|
||||
TF --> ADT
|
||||
ADT --> T3
|
||||
|
||||
T1 --> RLS
|
||||
T2 --> RLS
|
||||
T3 --> RLS
|
||||
RLS --> FDW
|
||||
```
|
||||
|
||||
### 任务依赖关系
|
||||
|
||||
```
|
||||
DWD_LOAD_FROM_ODS
|
||||
├── DWS_ASSISTANT_DAILY (扩展:+惩罚检测计算)
|
||||
├── DWS_MEMBER_CONSUMPTION (扩展:+充值窗口+次均消费)
|
||||
└── DWS_ASSISTANT_ORDER_CONTRIBUTION (新建:四项统计)
|
||||
```
|
||||
|
||||
`DWS_ASSISTANT_ORDER_CONTRIBUTION` 依赖 `DWD_LOAD_FROM_ODS`(需要最新的结算、台费、服务记录数据)。
|
||||
|
||||
## 组件与接口
|
||||
|
||||
### AssistantOrderContributionTask(新建)
|
||||
|
||||
继承 `BaseDwsTask`,实现四项统计计算:
|
||||
|
||||
```python
|
||||
class AssistantOrderContributionTask(BaseDwsTask):
|
||||
DATE_COL = "stat_date"
|
||||
|
||||
def get_task_code(self) -> str:
|
||||
return "DWS_ASSISTANT_ORDER_CONTRIBUTION"
|
||||
|
||||
def get_target_table(self) -> str:
|
||||
return "dws_assistant_order_contribution"
|
||||
|
||||
def get_primary_keys(self) -> List[str]:
|
||||
return ["site_id", "assistant_id", "stat_date"]
|
||||
|
||||
# --- ETL 主流程 ---
|
||||
def extract(self, context: TaskContext) -> Dict[str, Any]: ...
|
||||
def transform(self, extracted, context) -> List[Dict[str, Any]]: ...
|
||||
# load() 使用 BaseDwsTask 默认实现
|
||||
|
||||
# --- 数据提取 ---
|
||||
def _extract_order_data(self, site_id, start_date, end_date) -> List[Dict]: ...
|
||||
|
||||
# --- 核心计算(纯函数,可独立测试) ---
|
||||
@staticmethod
|
||||
def compute_order_gross_revenue(order: OrderData) -> Decimal:
|
||||
"""订单总流水 = 台费 + 酒水食品 + 所有助教服务费"""
|
||||
...
|
||||
|
||||
@staticmethod
|
||||
def compute_order_net_revenue(order: OrderData) -> Decimal:
|
||||
"""订单净流水 = 订单总流水 - 所有助教服务分成"""
|
||||
...
|
||||
|
||||
@staticmethod
|
||||
def compute_time_weighted_revenue(
|
||||
order: OrderData, assistant_id: int
|
||||
) -> Decimal:
|
||||
"""时效贡献流水 = 台费按时长分摊 + 个人服务费 + 酒水食品按时长比例"""
|
||||
...
|
||||
|
||||
@staticmethod
|
||||
def compute_time_weighted_net_revenue(
|
||||
time_weighted_revenue: Decimal, assistant_commission: Decimal
|
||||
) -> Decimal:
|
||||
"""时效净贡献 = 时效贡献流水 - 个人服务分成"""
|
||||
...
|
||||
```
|
||||
|
||||
### 核心数据结构
|
||||
|
||||
```python
|
||||
@dataclass
|
||||
class TableUsage:
|
||||
"""台桌使用信息"""
|
||||
table_id: int
|
||||
table_area: str # 区域名称(A/B/C/S/TV/M1-M7 等)
|
||||
usage_seconds: int # 台桌使用时长(秒)
|
||||
table_fee: Decimal # 台费/房费
|
||||
|
||||
@dataclass
|
||||
class AssistantService:
|
||||
"""助教服务记录"""
|
||||
assistant_id: int
|
||||
table_id: int
|
||||
service_seconds: int # 服务时长(秒)
|
||||
ledger_amount: Decimal # 服务流水(助教收费)
|
||||
commission: Decimal # 助教分成
|
||||
skill_id: int
|
||||
course_type: str # BASE / BONUS / ROOM
|
||||
|
||||
@dataclass
|
||||
class OrderData:
|
||||
"""订单聚合数据(一个结算单的完整信息)"""
|
||||
order_settle_id: int
|
||||
site_id: int
|
||||
total_table_fee: Decimal # 台费总额
|
||||
total_goods_amount: Decimal # 酒水食品总额
|
||||
tables: List[TableUsage] # 台桌列表
|
||||
assistants: List[AssistantService] # 助教服务列表
|
||||
```
|
||||
|
||||
### 四项统计算法详解
|
||||
|
||||
#### 1. 订单总流水(order_gross_revenue)
|
||||
|
||||
```
|
||||
order_gross_revenue = total_table_fee + total_goods_amount + SUM(所有助教的 ledger_amount)
|
||||
```
|
||||
|
||||
每个参与助教获得相同的 order_gross_revenue 值。
|
||||
|
||||
#### 2. 订单净流水(order_net_revenue)
|
||||
|
||||
```
|
||||
order_net_revenue = order_gross_revenue - SUM(所有助教的 commission)
|
||||
```
|
||||
|
||||
每个参与助教获得相同的 order_net_revenue 值。
|
||||
|
||||
#### 3. 时效贡献流水(time_weighted_revenue)— 核心算法
|
||||
|
||||
这是最复杂的计算,按以下步骤进行:
|
||||
|
||||
**步骤 1:确定每张台桌的有效计费时长**
|
||||
```
|
||||
对于每张台桌 t:
|
||||
助教总服务时长 = SUM(该台桌所有助教的 service_seconds)
|
||||
有效计费时长 = MAX(助教总服务时长, 台桌使用时长)
|
||||
每小时单价 = table_fee / (有效计费时长 / 3600)
|
||||
```
|
||||
|
||||
**步骤 2:按助教在台桌的服务时长分摊台费**
|
||||
```
|
||||
对于每个助教 a 在台桌 t:
|
||||
台费分摊 = 每小时单价 × (助教在该台桌的服务时长 / 3600)
|
||||
|
||||
特殊情况:当助教总服务时长 < 台桌使用时长时
|
||||
   按比例缩放:台费只对助教实际覆盖的时段进行分摊,再在该台桌的各助教之间按个人服务时长占比分配
|
||||
即:台费分摊 = (table_fee / 台桌使用时长 × MIN(助教总服务时长, 台桌使用时长))
|
||||
× (助教个人时长 / 助教总服务时长)
|
||||
```
|
||||
|
||||
更精确的公式(统一处理两种情况):
|
||||
```
|
||||
对于台桌 t:
|
||||
billable_seconds = MAX(SUM(助教服务时长), 台桌使用时长)
|
||||
对于助教 a:
|
||||
台费分摊_a = table_fee_t × (service_seconds_a / billable_seconds)
|
||||
```
|
||||
|
||||
> 注意:当 `SUM(助教服务时长) > 台桌使用时长` 时,`billable_seconds = SUM(助教服务时长)`,
|
||||
> 此时各助教按自己的时长占比分摊台费,总和 = table_fee。
|
||||
> 当 `SUM(助教服务时长) < 台桌使用时长` 时,`billable_seconds = 台桌使用时长`,
|
||||
> 此时各助教分摊的台费总和 < table_fee(未被助教覆盖的时段不分配给任何人)。
|
||||
|
||||
**步骤 3:助教个人服务费直接计入**
|
||||
```
|
||||
个人服务费 = 助教的 ledger_amount
|
||||
```
|
||||
|
||||
**步骤 4:酒水食品按助教总时长比例均分**
|
||||
```
|
||||
助教总时长 = SUM(所有助教在所有台桌的 service_seconds)
|
||||
对于助教 a:
|
||||
酒水食品分摊 = total_goods_amount × (助教 a 的总服务时长 / 助教总时长)
|
||||
```
|
||||
|
||||
**合成:**
|
||||
```
|
||||
time_weighted_revenue_a = SUM(各台桌台费分摊_a) + 个人服务费_a + 酒水食品分摊_a
|
||||
```
|
||||
|
||||
#### 4. 时效净贡献(time_weighted_net_revenue)
|
||||
|
||||
```
|
||||
time_weighted_net_revenue_a = time_weighted_revenue_a - commission_a
|
||||
```
|
||||
|
||||
#### 5. 超休/打赏课特殊处理
|
||||
|
||||
当助教为超休/打赏课类型(`course_type = BONUS`)时,该助教不参与订单级分摊:
|
||||
```
|
||||
order_gross_revenue = ledger_amount(个人服务流水)
|
||||
order_net_revenue = ledger_amount - commission
|
||||
time_weighted_revenue = ledger_amount
|
||||
time_weighted_net_revenue = ledger_amount - commission
|
||||
```
|
||||
|
||||
### MemberConsumptionTask 扩展
|
||||
|
||||
在现有任务中新增:
|
||||
|
||||
```python
|
||||
# extract 阶段新增
|
||||
def _extract_recharge_stats(self, site_id: int, stat_date: date) -> Dict[int, Dict]:
|
||||
"""从 dwd_recharge_order 提取 30/60/90 天充值统计"""
|
||||
...
|
||||
|
||||
# transform 阶段新增字段
|
||||
record['recharge_count_30d'] = recharge.get('count_30d', 0)
|
||||
record['recharge_count_60d'] = recharge.get('count_60d', 0)
|
||||
record['recharge_count_90d'] = recharge.get('count_90d', 0)
|
||||
record['recharge_amount_30d'] = recharge.get('amount_30d', Decimal('0'))
|
||||
record['recharge_amount_60d'] = recharge.get('amount_60d', Decimal('0'))
|
||||
record['recharge_amount_90d'] = recharge.get('amount_90d', Decimal('0'))
|
||||
record['avg_ticket_amount'] = (
|
||||
record['total_consume_amount'] / max(record['total_visit_count'], 1)
|
||||
)
|
||||
```
|
||||
|
||||
### AssistantDailyTask 扩展 — 惩罚检测
|
||||
|
||||
在现有任务的 transform 阶段新增惩罚检测逻辑:
|
||||
|
||||
```python
|
||||
# 惩罚检测核心逻辑
|
||||
@staticmethod
|
||||
def detect_overlap_violations(
|
||||
service_records: List[Dict],
|
||||
penalty_areas: Set[str]
|
||||
) -> Dict[Tuple[int, date], List[Dict]]:
|
||||
"""
|
||||
检测同一台桌同一时间段超过 2 名助教挂台的违规。
|
||||
|
||||
penalty_areas: 指定区域集合,如 {'A','B','C','S','TV','M1','M2',...,'M7'}
|
||||
|
||||
返回: {(assistant_id, stat_date): [violation_info, ...]}
|
||||
"""
|
||||
...
|
||||
|
||||
@staticmethod
|
||||
def compute_penalty_minutes(
|
||||
actual_minutes: Decimal,
|
||||
per_hour_contribution: Decimal,
|
||||
threshold: Decimal = Decimal('24')
|
||||
) -> Decimal:
|
||||
"""
|
||||
计算惩罚分钟数。
|
||||
|
||||
per_hour_contribution >= threshold: 返回 0
|
||||
per_hour_contribution < threshold:
|
||||
返回 actual_minutes × (1 - per_hour_contribution / threshold)
|
||||
"""
|
||||
...
|
||||
```
|
||||
|
||||
**惩罚区域定义:**
|
||||
- 大厅:A、B、C、S、TV
|
||||
- 麻将房:M1、M2、M3、M4、M5、M6、M7
|
||||
|
||||
**时间重叠检测算法:**
|
||||
1. 按 `(台桌ID, 服务日期)` 分组所有服务记录
|
||||
2. 对每组内的服务记录,检查时间段是否有重叠(任意两个助教的 `[start_time, end_time]` 有交集)
|
||||
3. 若同一时间段内助教数 > 2,标记为违规
|
||||
4. 对违规记录计算 `per_hour_contribution = 台费每小时单价 / 该时段助教人数`
|
||||
5. 根据 `per_hour_contribution` 与 24 元阈值比较,计算 `penalty_minutes`
|
||||
|
||||
## 数据模型
|
||||
|
||||
### dws.dws_assistant_order_contribution(新建)
|
||||
|
||||
```sql
|
||||
CREATE TABLE dws.dws_assistant_order_contribution (
|
||||
contribution_id BIGSERIAL PRIMARY KEY,
|
||||
site_id INTEGER NOT NULL,
|
||||
tenant_id INTEGER NOT NULL,
|
||||
assistant_id BIGINT NOT NULL,
|
||||
assistant_nickname VARCHAR(100),
|
||||
stat_date DATE NOT NULL,
|
||||
|
||||
-- 四项统计
|
||||
order_gross_revenue NUMERIC(14,2) DEFAULT 0,
|
||||
order_net_revenue NUMERIC(14,2) DEFAULT 0,
|
||||
time_weighted_revenue NUMERIC(14,2) DEFAULT 0,
|
||||
time_weighted_net_revenue NUMERIC(14,2) DEFAULT 0,
|
||||
|
||||
-- 辅助字段
|
||||
order_count INTEGER DEFAULT 0,
|
||||
total_service_seconds INTEGER DEFAULT 0,
|
||||
|
||||
-- 元数据
|
||||
created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW(),
|
||||
updated_at TIMESTAMP WITH TIME ZONE DEFAULT NOW()
|
||||
);
|
||||
|
||||
CREATE UNIQUE INDEX idx_aoc_site_assistant_date
|
||||
ON dws.dws_assistant_order_contribution (site_id, assistant_id, stat_date);
|
||||
|
||||
CREATE INDEX idx_aoc_stat_date
|
||||
ON dws.dws_assistant_order_contribution (site_id, stat_date);
|
||||
```
|
||||
|
||||
### dws_member_consumption_summary 扩展字段
|
||||
|
||||
```sql
|
||||
ALTER TABLE dws.dws_member_consumption_summary
|
||||
ADD COLUMN IF NOT EXISTS recharge_count_30d INTEGER DEFAULT 0,
|
||||
ADD COLUMN IF NOT EXISTS recharge_count_60d INTEGER DEFAULT 0,
|
||||
ADD COLUMN IF NOT EXISTS recharge_count_90d INTEGER DEFAULT 0,
|
||||
ADD COLUMN IF NOT EXISTS recharge_amount_30d NUMERIC(14,2) DEFAULT 0,
|
||||
ADD COLUMN IF NOT EXISTS recharge_amount_60d NUMERIC(14,2) DEFAULT 0,
|
||||
ADD COLUMN IF NOT EXISTS recharge_amount_90d NUMERIC(14,2) DEFAULT 0,
|
||||
ADD COLUMN IF NOT EXISTS avg_ticket_amount NUMERIC(14,2) DEFAULT 0;
|
||||
```
|
||||
|
||||
### dws_assistant_daily_detail 扩展字段
|
||||
|
||||
```sql
|
||||
ALTER TABLE dws.dws_assistant_daily_detail
|
||||
ADD COLUMN IF NOT EXISTS penalty_minutes NUMERIC(10,2) DEFAULT 0,
|
||||
ADD COLUMN IF NOT EXISTS penalty_reason TEXT,
|
||||
ADD COLUMN IF NOT EXISTS is_exempt BOOLEAN DEFAULT FALSE,
|
||||
ADD COLUMN IF NOT EXISTS per_hour_contribution NUMERIC(14,2);
|
||||
```
|
||||
|
||||
### RLS 视图(app schema)
|
||||
|
||||
```sql
|
||||
-- 新建:助教订单流水统计
|
||||
CREATE OR REPLACE VIEW app.v_dws_assistant_order_contribution AS
|
||||
SELECT * FROM dws.dws_assistant_order_contribution
|
||||
WHERE site_id = current_setting('app.current_site_id')::bigint;
|
||||
|
||||
-- 已有视图无需修改:dws_member_consumption_summary 和 dws_assistant_daily_detail
|
||||
-- 的 RLS 视图使用 SELECT *,新增字段自动包含
|
||||
```
|
||||
|
||||
### FDW 映射(fdw_etl schema)
|
||||
|
||||
在 `test_zqyy_app.fdw_etl` 中新建外部表:
|
||||
|
||||
```sql
|
||||
CREATE FOREIGN TABLE fdw_etl.dws_assistant_order_contribution (
|
||||
contribution_id BIGINT,
|
||||
site_id INTEGER,
|
||||
tenant_id INTEGER,
|
||||
assistant_id BIGINT,
|
||||
assistant_nickname VARCHAR(100),
|
||||
stat_date DATE,
|
||||
order_gross_revenue NUMERIC(14,2),
|
||||
order_net_revenue NUMERIC(14,2),
|
||||
time_weighted_revenue NUMERIC(14,2),
|
||||
time_weighted_net_revenue NUMERIC(14,2),
|
||||
order_count INTEGER,
|
||||
total_service_seconds INTEGER,
|
||||
created_at TIMESTAMP WITH TIME ZONE,
|
||||
updated_at TIMESTAMP WITH TIME ZONE
|
||||
) SERVER etl_server
|
||||
OPTIONS (schema_name 'app', table_name 'v_dws_assistant_order_contribution');
|
||||
```
|
||||
|
||||
对于扩展字段的表(`dws_member_consumption_summary`、`dws_assistant_daily_detail`),需要 `DROP` 并重建 FDW 外部表定义以包含新字段。
|
||||
|
||||
|
||||
## 正确性属性
|
||||
|
||||
*正确性属性(Correctness Property)是系统在所有合法执行路径上都应成立的行为特征——本质上是对"系统应该做什么"的形式化陈述。属性是人类可读规格与机器可验证正确性保证之间的桥梁。*
|
||||
|
||||
以下属性基于需求文档中的验收标准推导。四项统计的核心计算函数(`compute_order_gross_revenue`、`compute_time_weighted_revenue` 等)和惩罚计算函数(`compute_penalty_minutes`)设计为纯静态方法,不依赖数据库,可直接用于属性测试。
|
||||
|
||||
### Property 1: 订单级统计不变量 — gross/net 各助教相等
|
||||
|
||||
*For any* 订单数据(包含任意数量的台桌、助教服务和酒水食品),所有参与该订单的助教应获得相同的 `order_gross_revenue` 值,且获得相同的 `order_net_revenue` 值。
|
||||
|
||||
推导:`order_gross_revenue` 和 `order_net_revenue` 是订单级聚合值,不按助教个人拆分,因此所有参与助教共享同一个值。
|
||||
|
||||
**Validates: Requirements 2.2, 2.3, 10.1, 10.2**
|
||||
|
||||
### Property 2: 时效贡献流水之和约束
|
||||
|
||||
*For any* 订单数据,所有参与助教的 `time_weighted_revenue` 之和应满足:
|
||||
- 当所有台桌的助教总服务时长 ≥ 台桌使用时长时,之和 = `order_gross_revenue`
|
||||
- 当存在台桌的助教总服务时长 < 台桌使用时长时,之和 ≤ `order_gross_revenue`
|
||||
|
||||
且在所有情况下,之和 ≥ 0。
|
||||
|
||||
推导:台费按时长比例分摊,当助教完全覆盖台桌时长时分摊总和等于台费;酒水食品按时长比例均分总和等于酒水总额;助教服务费直接计入。因此总和 = 台费分摊总和 + 酒水分摊总和 + 服务费总和 ≤ order_gross_revenue。
|
||||
|
||||
**Validates: Requirements 2.4, 10.3**
|
||||
|
||||
### Property 3: 时效净贡献减法关系
|
||||
|
||||
*For any* 助教和订单数据,该助教的 `time_weighted_net_revenue` 应等于 `time_weighted_revenue - commission`(该助教个人的服务分成)。
|
||||
|
||||
推导:这是定义性等式,直接从需求 2.5 得出。
|
||||
|
||||
**Validates: Requirements 2.5, 10.4**
|
||||
|
||||
### Property 4: 惩罚分钟数分段公式
|
||||
|
||||
*For any* 非负的 `actual_minutes` 和非负的 `per_hour_contribution`:
|
||||
- 当 `per_hour_contribution >= 24` 时,`penalty_minutes = 0`
|
||||
- 当 `per_hour_contribution < 24` 时,`penalty_minutes = actual_minutes × (1 - per_hour_contribution / 24)`
|
||||
|
||||
且在所有情况下,`0 ≤ penalty_minutes ≤ actual_minutes`。
|
||||
|
||||
推导:直接从需求 6.3/6.4 的分段公式得出。上界 `actual_minutes` 在 `per_hour_contribution = 0` 时取到。
|
||||
|
||||
**Validates: Requirements 6.3, 6.4, 10.5, 10.6**
|
||||
|
||||
### Property 5: 次均消费公式
|
||||
|
||||
*For any* 非负的 `total_consume_amount` 和非负整数 `total_visit_count`,`avg_ticket_amount` 应等于 `total_consume_amount / MAX(total_visit_count, 1)`。
|
||||
|
||||
推导:直接从需求 3.4 得出。`MAX(total_visit_count, 1)` 防止除零。
|
||||
|
||||
**Validates: Requirements 3.4, 10.7**
|
||||
|
||||
### Property 6: 重叠检测正确性
|
||||
|
||||
*For any* 一组助教服务记录,若在指定区域的同一台桌上存在 3 名或以上助教的服务时间段有重叠,则 `detect_overlap_violations` 应返回非空的违规列表。
|
||||
|
||||
推导:需求 6.1 要求检测"同一台桌同一时间段超过 2 名助教挂台"。我们可以生成随机的服务记录(包含时间段重叠和不重叠的情况),验证检测函数的正确性。
|
||||
|
||||
**Validates: Requirements 6.1**
|
||||
|
||||
## 错误处理
|
||||
|
||||
| 场景 | 处理方式 |
|
||||
|------|----------|
|
||||
| 订单无助教服务记录 | 跳过该订单,不生成统计记录 |
|
||||
| 台桌使用时长为 0 | 台费分摊设为 0,避免除零 |
|
||||
| 助教总服务时长为 0 | 酒水食品分摊设为 0,避免除零 |
|
||||
| 会员无充值记录 | 充值次数/金额设为 0 |
|
||||
| 会员无消费记录 | avg_ticket_amount 设为 0 |
|
||||
| 助教当日无违规 | penalty_minutes = 0,penalty_reason = NULL |
|
||||
| 服务记录缺少时间段信息 | 跳过该记录的重叠检测,日志 WARNING |
|
||||
| per_hour_contribution 为负数 | 视为 0 处理(防御性编程) |
|
||||
| FDW 映射创建失败 | 事务回滚,报错终止 |
|
||||
| 数据库写入失败 | 事务回滚,抛出异常由调度器处理 |
|
||||
|
||||
> **注意:所有数据库操作均在测试库(`test_etl_feiqiu` / `test_zqyy_app`)中进行。**
|
||||
|
||||
## 测试策略
|
||||
|
||||
### 属性测试(hypothesis)
|
||||
|
||||
属性测试位于 `tests/` 目录(Monorepo 级),使用 `hypothesis` 库。
|
||||
|
||||
每个属性测试对应设计文档中的一个 Property,最少运行 100 次迭代。
|
||||
|
||||
测试文件:`tests/test_dws_contribution_properties.py`
|
||||
|
||||
```python
|
||||
# Feature: 02-etl-dws-miniapp-extensions, Property 1: 订单级统计不变量
|
||||
@given(order_data=order_data_strategy())
|
||||
@settings(max_examples=200)
|
||||
def test_gross_net_equal_across_assistants(order_data):
|
||||
"""所有参与助教的 order_gross_revenue 和 order_net_revenue 应分别相等"""
|
||||
gross = AssistantOrderContributionTask.compute_order_gross_revenue(order_data)
|
||||
net = AssistantOrderContributionTask.compute_order_net_revenue(order_data)
|
||||
# 每个助教获得相同的 gross 和 net
|
||||
    for record in AssistantOrderContributionTask().transform({'orders': [order_data]}, context=None):
|
||||
        assert record['order_gross_revenue'] == gross
|
||||
        assert record['order_net_revenue'] == net
|
||||
```
|
||||
|
||||
```python
|
||||
# Feature: 02-etl-dws-miniapp-extensions, Property 4: 惩罚分钟数分段公式
|
||||
@given(
|
||||
actual_minutes=st.decimals(min_value=0, max_value=600, places=2),
|
||||
per_hour_contribution=st.decimals(min_value=0, max_value=200, places=2),
|
||||
)
|
||||
@settings(max_examples=200)
|
||||
def test_penalty_minutes_formula(actual_minutes, per_hour_contribution):
|
||||
"""惩罚分钟数应符合分段公式且在 [0, actual_minutes] 范围内"""
|
||||
result = AssistantDailyTask.compute_penalty_minutes(
|
||||
actual_minutes, per_hour_contribution
|
||||
)
|
||||
if per_hour_contribution >= 24:
|
||||
assert result == 0
|
||||
else:
|
||||
expected = actual_minutes * (1 - per_hour_contribution / 24)
|
||||
assert abs(result - expected) < Decimal('0.01')
|
||||
assert 0 <= result <= actual_minutes
|
||||
```
|
||||
|
||||
属性测试库:`hypothesis`(已在项目依赖中)
|
||||
|
||||
### 单元测试
|
||||
|
||||
单元测试位于 `apps/etl/connectors/feiqiu/tests/unit/`,使用 FakeDB/FakeAPI 工具。
|
||||
|
||||
重点覆盖:
|
||||
- PRD 示例数据验算:使用 PRD 中的具体订单示例(3 名助教、2 张台桌、酒水 600 元)验证四项统计的精确数值
|
||||
- 超休/打赏课边界:验证超休助教的四项统计等于个人流水
|
||||
- 零值边界:无台费、无酒水、无助教服务的极端情况
|
||||
- 惩罚计算边界:per_hour_contribution 恰好等于 24 元的临界值
|
||||
- 充值窗口:验证 30/60/90 天窗口的正确切分
|
||||
- 豁免逻辑:is_exempt = TRUE 时跳过惩罚
|
||||
|
||||
### 测试配置
|
||||
|
||||
- 属性测试:`cd C:\NeoZQYY && pytest tests/test_dws_contribution_properties.py -v`
|
||||
- 单元测试:`cd apps/etl/connectors/feiqiu && pytest tests/unit/test_assistant_order_contribution.py -v`
|
||||
- 每个属性测试标注 `@settings(max_examples=200)`
|
||||
- 每个属性测试注释引用设计文档 Property 编号
|
||||
157
.kiro/specs/02-etl-dws-miniapp-extensions/requirements.md
Normal file
157
.kiro/specs/02-etl-dws-miniapp-extensions/requirements.md
Normal file
@@ -0,0 +1,157 @@
|
||||
# 需求文档:ETL DWS 层扩展 — 小程序数据支撑
|
||||
|
||||
## 简介
|
||||
|
||||
本 Spec 覆盖 P2 任务中 T4–T11 的 ETL DWS 层扩展,为小程序提供三类核心数据支撑:
|
||||
1. 助教订单流水四项统计(`dws_assistant_order_contribution`)
|
||||
2. 会员消费汇总扩展(充值窗口 + 次均消费)
|
||||
3. 定档折算惩罚检测与计算
|
||||
|
||||
同时包含新表的 RLS 视图创建、FDW 映射同步,以及影子跑数验证。
|
||||
|
||||
> SPI 消费力指数(T1–T3)已在独立 Spec `.kiro/specs/spi-spending-power-index/` 中完成,本文档不再重复。
|
||||
|
||||
## 术语表
|
||||
|
||||
- **AssistantOrderContributionTask**:助教订单流水统计 ETL 任务,粒度 `(site_id, assistant_id, stat_date)`
|
||||
- **MemberConsumptionTask**:会员消费汇总 ETL 任务,粒度 `(site_id, member_id, stat_date)`
|
||||
- **AssistantDailyTask**:助教日度业绩明细 ETL 任务,粒度 `(site_id, assistant_id, stat_date)`
|
||||
- **dws_assistant_order_contribution**:助教订单流水四项统计结果表
|
||||
- **dws_member_consumption_summary**:会员消费汇总表(已有,需扩展字段)
|
||||
- **dws_assistant_daily_detail**:助教日度业绩明细表(已有,需扩展字段)
|
||||
- **order_gross_revenue**:订单总流水 — 助教参与订单的全部流水(台费 + 酒水食品 + 助教服务费)
|
||||
- **order_net_revenue**:订单净流水 — 订单总流水 - 该订单所有助教的服务分成总额
|
||||
- **time_weighted_revenue**:时效贡献流水 — 按助教个人服务时长折算的订单金额贡献
|
||||
- **time_weighted_net_revenue**:时效净贡献 — 时效贡献流水 - 该助教个人的服务分成
|
||||
- **penalty_minutes**:定档折算惩罚分钟数 — 因违规被扣减的定档业绩时长
|
||||
- **per_hour_contribution**:单人每小时贡献流水 — 台费/房费每小时实收单价 ÷ 本次基础课助教人数
|
||||
- **RLS 视图**:行级安全视图,位于 ETL 库 `app` schema,按 `site_id` 隔离数据
|
||||
- **FDW 映射**:外部数据包装器映射,将 ETL 库表映射到业务库 `fdw_etl` schema
|
||||
- **settle_type**:结算类型,1=台桌结账,3=商城订单,5=充值订单
|
||||
- **BaseDwsTask**:DWS 层任务基类,提供 delete-before-insert 幂等机制
|
||||
- **delete-before-insert**:幂等更新策略,先按条件删除旧记录再批量插入新记录
|
||||
|
||||
## 需求
|
||||
|
||||
### 需求 1:助教订单流水统计表创建(T4)
|
||||
|
||||
**用户故事:** 作为 ETL 开发者,我需要创建助教订单流水四项统计表,以便存储每名助教每日的订单流水贡献数据。
|
||||
|
||||
#### 验收标准
|
||||
|
||||
1. THE 开发者 SHALL 创建 `dws.dws_assistant_order_contribution` 表,主键为 `(site_id, assistant_id, stat_date)`
|
||||
2. THE dws_assistant_order_contribution 表 SHALL 包含四项统计字段:`order_gross_revenue`(订单总流水)、`order_net_revenue`(订单净流水)、`time_weighted_revenue`(时效贡献流水)、`time_weighted_net_revenue`(时效净贡献),精度为 `NUMERIC(14,2)`
|
||||
3. THE dws_assistant_order_contribution 表 SHALL 包含辅助字段:`order_count`(参与订单数)、`total_service_seconds`(总服务时长秒数)、`assistant_nickname`(助教昵称)
|
||||
4. THE dws_assistant_order_contribution 表 SHALL 包含元数据字段:`tenant_id`、`created_at`、`updated_at`
|
||||
5. THE 开发者 SHALL 编写迁移脚本 `db/etl_feiqiu/migrations/<日期>__create_dws_assistant_order_contribution.sql`,在测试库 `test_etl_feiqiu` 中执行建表
|
||||
6. WHEN DDL 在测试库执行成功后,THE 开发者 SHALL 运行 `gen_consolidated_ddl.py` 导出最新 DDL
|
||||
|
||||
### 需求 2:助教订单流水四项统计计算(T5)
|
||||
|
||||
**用户故事:** 作为产品经理,我需要助教订单流水四项统计(订单总流水/订单净流水/时效贡献流水/时效净贡献),以便评估助教个人能力。
|
||||
|
||||
#### 验收标准
|
||||
|
||||
1. THE AssistantOrderContributionTask SHALL 从 `dwd.dwd_settlement_head`、`dwd.dwd_table_fee_log`、`dwd.dwd_assistant_service_log` 提取订单、台费和助教服务数据
|
||||
2. WHEN 计算 order_gross_revenue 时,THE AssistantOrderContributionTask SHALL 将助教参与订单的全部流水(台费 + 酒水食品 + 所有助教服务费)累加,每个参与助教获得相同的订单总流水值
|
||||
3. WHEN 计算 order_net_revenue 时,THE AssistantOrderContributionTask SHALL 从订单总流水中减去该订单所有助教的服务分成总额,每个参与助教获得相同的订单净流水值
|
||||
4. WHEN 计算 time_weighted_revenue 时,THE AssistantOrderContributionTask SHALL 按以下步骤折算个人贡献:
|
||||
- 确定每张台桌的有效计费时长:取 MAX(该台桌所有助教服务时长之和, 台桌使用时长)
|
||||
- 按助教在该台桌的服务时长占比分摊台费
|
||||
- 助教个人服务费直接计入
|
||||
- 酒水食品按助教个人服务总时长占所有助教服务总时长的比例均分
|
||||
5. WHEN 计算 time_weighted_net_revenue 时,THE AssistantOrderContributionTask SHALL 从该助教的时效贡献流水中减去该助教个人的服务分成
|
||||
6. WHEN 助教为超休/打赏课类型时,THE AssistantOrderContributionTask SHALL 将四项统计均设为该助教个人的服务流水和分成(不参与订单级分摊)
|
||||
7. THE AssistantOrderContributionTask SHALL 以任务代码 `DWS_ASSISTANT_ORDER_CONTRIBUTION` 注册到 task_registry
|
||||
8. THE AssistantOrderContributionTask SHALL 采用 delete-before-insert 策略按日期窗口幂等更新
|
||||
|
||||
### 需求 3:会员消费汇总扩展(T6)
|
||||
|
||||
**用户故事:** 作为产品经理,我需要客户 30/60/90 天充值次数和金额、次均消费,以便在客户看板中展示。
|
||||
|
||||
#### 验收标准
|
||||
|
||||
1. THE 开发者 SHALL 在 `dws.dws_member_consumption_summary` 表中新增以下字段:`recharge_count_30d`、`recharge_count_60d`、`recharge_count_90d`(充值次数,INTEGER)、`recharge_amount_30d`、`recharge_amount_60d`、`recharge_amount_90d`(充值金额,NUMERIC(14,2))、`avg_ticket_amount`(次均消费额度,NUMERIC(14,2))
|
||||
2. THE 开发者 SHALL 编写 ALTER TABLE 迁移脚本在测试库 `test_etl_feiqiu` 中执行字段扩展
|
||||
3. THE 充值数据 SHALL 从 `dwd.dwd_recharge_order` 提取,按 `member_id` 和时间窗口聚合
|
||||
4. THE avg_ticket_amount SHALL 按公式 `total_consume_amount / MAX(total_visit_count, 1)` 计算
|
||||
|
||||
### 需求 4:会员消费汇总任务修改(T7)
|
||||
|
||||
**用户故事:** 作为 ETL 开发者,我需要修改 MemberConsumptionTask 以填充新增的充值窗口和次均消费字段。
|
||||
|
||||
#### 验收标准
|
||||
|
||||
1. THE MemberConsumptionTask SHALL 在 extract 阶段新增充值订单提取逻辑,从 `dwd.dwd_recharge_order` 按 30/60/90 天窗口聚合充值次数和金额
|
||||
2. THE MemberConsumptionTask SHALL 在 transform 阶段将充值统计和次均消费填充到输出记录中
|
||||
3. WHEN 会员无充值记录时,THE MemberConsumptionTask SHALL 将充值次数设为 0、充值金额设为 0.00
|
||||
4. WHEN 会员无消费记录时,THE MemberConsumptionTask SHALL 将 avg_ticket_amount 设为 0.00
|
||||
|
||||
### 需求 5:助教日度明细表扩展 — 定档折算惩罚字段(T8)
|
||||
|
||||
**用户故事:** 作为 ETL 开发者,我需要在助教日度明细表中新增定档折算惩罚相关字段,以便存储惩罚检测和计算结果。
|
||||
|
||||
#### 验收标准
|
||||
|
||||
1. THE 开发者 SHALL 在 `dws.dws_assistant_daily_detail` 表中新增以下字段:`penalty_minutes` (NUMERIC(10,2))、`penalty_reason` (TEXT)、`is_exempt` (BOOLEAN DEFAULT FALSE)、`per_hour_contribution` (NUMERIC(14,2))
|
||||
2. THE 开发者 SHALL 编写 ALTER TABLE 迁移脚本在测试库 `test_etl_feiqiu` 中执行字段扩展
|
||||
3. WHEN 助教当日无惩罚时,THE penalty_minutes SHALL 为 0,penalty_reason SHALL 为 NULL
|
||||
|
||||
### 需求 6:定档折算惩罚检测与计算逻辑(T9)
|
||||
|
||||
**用户故事:** 作为产品经理,我需要定档折算惩罚数据,以便在绩效页面展示折算详情,防止助教利用低价订单冲档位。
|
||||
|
||||
#### 验收标准
|
||||
|
||||
1. THE AssistantDailyTask SHALL 检测规则 2 违规:在指定区域(大厅 A/B/C/S/TV 和麻将房 M1–M7)内,同一台桌同一时间段超过 2 名助教挂台(课程时间段有重叠即算)
|
||||
2. WHEN 检测到违规时,THE AssistantDailyTask SHALL 计算单人每小时贡献流水:台费/房费每小时实收单价 ÷ 本次基础课助教人数
|
||||
3. WHEN per_hour_contribution >= 24 元时,THE AssistantDailyTask SHALL 按满额计入定档业绩时长(penalty_minutes = 0)
|
||||
4. WHEN per_hour_contribution < 24 元时,THE AssistantDailyTask SHALL 按比例折算:`penalty_minutes = 实际服务分钟数 × (1 - per_hour_contribution / 24)`
|
||||
5. WHEN 订单标记为 is_exempt = TRUE 时,THE AssistantDailyTask SHALL 跳过惩罚计算,penalty_minutes 设为 0
|
||||
6. THE 定档折算惩罚 SHALL 仅影响定档业绩时长统计,不影响实际工资时长
|
||||
7. THE AssistantDailyTask SHALL 每日自动计算惩罚,计算频率与现有日度任务一致
|
||||
|
||||
### 需求 7:RLS 视图创建(T10)
|
||||
|
||||
**用户故事:** 作为 ETL 开发者,我需要为新表创建 RLS 视图,以便通过 FDW 安全地向业务库暴露数据。
|
||||
|
||||
#### 验收标准
|
||||
|
||||
1. THE 开发者 SHALL 在 ETL 库 `app` schema 中为 `dws_assistant_order_contribution` 创建 RLS 视图,按 `site_id` 过滤
|
||||
2. THE 开发者 SHALL 更新已有 RLS 视图以包含 `dws_member_consumption_summary` 和 `dws_assistant_daily_detail` 的新增字段
|
||||
3. THE RLS 视图 SHALL 使用 `current_setting('app.current_site_id')::BIGINT` 进行行级过滤(与设计文档及任务 7.1 中的视图定义保持一致)
|
||||
|
||||
### 需求 8:FDW 映射同步(T10)
|
||||
|
||||
**用户故事:** 作为后端开发者,我需要在业务库中通过 FDW 访问新建和扩展的 ETL 表,以便小程序后端读取数据。
|
||||
|
||||
#### 验收标准
|
||||
|
||||
1. THE 开发者 SHALL 在 `test_zqyy_app.fdw_etl` schema 中创建 `dws_assistant_order_contribution` 的外部表映射
|
||||
2. THE 开发者 SHALL 更新 `dws_member_consumption_summary` 和 `dws_assistant_daily_detail` 的 FDW 映射以包含新增字段
|
||||
3. THE FDW 映射 SHALL 通过 `app` schema 的 RLS 视图访问数据,而非直接访问 `dws` schema
|
||||
|
||||
### 需求 9:影子跑数验证(T11)
|
||||
|
||||
**用户故事:** 作为 ETL 开发者,我需要通过影子跑数验证新增统计的正确性,以便确保数据质量。
|
||||
|
||||
#### 验收标准
|
||||
|
||||
1. THE 开发者 SHALL 编写验证脚本,对照 PRD 示例数据验算四项统计的计算结果
|
||||
2. THE 验证脚本 SHALL 检查 `dws_assistant_order_contribution` 中四项统计数值的一致性:order_gross_revenue 各助教相等、order_net_revenue 各助教相等、time_weighted_revenue 各助教之和不超过订单总流水(当助教服务完全覆盖台桌时长时,之和应在 ±0.01 元误差范围内等于订单总流水)
|
||||
3. THE 验证脚本 SHALL 检查 `dws_member_consumption_summary` 新增字段有值且充值金额与 `dwd_recharge_order` 源数据一致
|
||||
4. THE 验证脚本 SHALL 检查定档折算惩罚字段在符合惩罚条件的记录上正确填充
|
||||
|
||||
### 需求 10:算法正确性测试
|
||||
|
||||
**用户故事:** 作为 ETL 开发者,我需要通过属性测试(hypothesis)验证四项统计和惩罚计算的正确性。
|
||||
|
||||
#### 验收标准
|
||||
|
||||
1. THE 属性测试 SHALL 验证:对于任意订单,所有参与助教的 order_gross_revenue 值相等
|
||||
2. THE 属性测试 SHALL 验证:对于任意订单,所有参与助教的 order_net_revenue 值相等
|
||||
3. THE 属性测试 SHALL 验证:对于任意订单,所有参与助教的 time_weighted_revenue 之和应不超过订单总流水;当所有台桌的助教总服务时长 ≥ 台桌使用时长时,之和应在订单总流水的 ±0.01 元误差范围内
|
||||
4. THE 属性测试 SHALL 验证:对于任意助教,time_weighted_net_revenue = time_weighted_revenue - 该助教个人服务分成
|
||||
5. THE 属性测试 SHALL 验证:对于任意 per_hour_contribution >= 24 的记录,penalty_minutes 为 0
|
||||
6. THE 属性测试 SHALL 验证:对于任意 per_hour_contribution < 24 且 per_hour_contribution >= 0 的记录,penalty_minutes = 实际分钟数 × (1 - per_hour_contribution / 24)
|
||||
7. THE 属性测试 SHALL 验证:对于任意会员,avg_ticket_amount = total_consume_amount / MAX(total_visit_count, 1)
|
||||
152
.kiro/specs/02-etl-dws-miniapp-extensions/tasks.md
Normal file
152
.kiro/specs/02-etl-dws-miniapp-extensions/tasks.md
Normal file
@@ -0,0 +1,152 @@
|
||||
# 实现计划:ETL DWS 层扩展 — 小程序数据支撑
|
||||
|
||||
## 概述
|
||||
|
||||
基于设计文档,将实现拆分为:DDL 建表/扩展 → 助教订单流水统计任务 → 会员消费汇总扩展 → 定档折算惩罚 → RLS 视图 + FDW 映射 → 影子跑数验证 六个阶段。所有数据库操作在测试库(`test_etl_feiqiu` / `test_zqyy_app`)中进行。
|
||||
|
||||
## 任务
|
||||
|
||||
- [x] 1. DDL 建表与字段扩展
|
||||
- [x] 1.1 编写迁移脚本创建 `dws.dws_assistant_order_contribution` 表
|
||||
- 新建 `db/etl_feiqiu/migrations/<日期>__create_dws_assistant_order_contribution.sql`
|
||||
- 包含表定义、唯一索引 `idx_aoc_site_assistant_date`、查询索引 `idx_aoc_stat_date`
|
||||
- 字段参照设计文档数据模型章节
|
||||
- _Requirements: 1.1, 1.2, 1.3, 1.4, 1.5_
|
||||
- [x] 1.2 编写迁移脚本扩展 `dws_member_consumption_summary` 字段
|
||||
- 新建 `db/etl_feiqiu/migrations/<日期>__alter_member_consumption_add_recharge_fields.sql`
|
||||
- ALTER TABLE 添加 `recharge_count_30d/60d/90d`、`recharge_amount_30d/60d/90d`、`avg_ticket_amount`
|
||||
- _Requirements: 3.1, 3.2_
|
||||
- [x] 1.3 编写迁移脚本扩展 `dws_assistant_daily_detail` 字段
|
||||
- 新建 `db/etl_feiqiu/migrations/<日期>__alter_assistant_daily_add_penalty_fields.sql`
|
||||
- ALTER TABLE 添加 `penalty_minutes`、`penalty_reason`、`is_exempt`、`per_hour_contribution`
|
||||
- _Requirements: 5.1, 5.2_
|
||||
- [x] 1.4 在测试库 `test_etl_feiqiu` 执行全部迁移脚本
|
||||
- 通过 `PG_DSN`(指向测试库)连接执行 SQL
|
||||
- _Requirements: 1.5, 3.2, 5.2_
|
||||
- [x] 1.5 运行 `gen_consolidated_ddl.py` 导出最新 DDL
|
||||
- 执行 `python scripts/ops/gen_consolidated_ddl.py`
|
||||
- 验证 `docs/database/ddl/etl_feiqiu__dws.sql` 已包含新表和扩展字段
|
||||
- _Requirements: 1.6_
|
||||
|
||||
- [x] 2. 实现助教订单流水统计任务
|
||||
- [x] 2.1 创建数据结构和 `AssistantOrderContributionTask` 骨架
|
||||
- 新建 `apps/etl/connectors/feiqiu/tasks/dws/assistant_order_contribution_task.py`
|
||||
- 定义 `TableUsage`、`AssistantService`、`OrderData` dataclass
|
||||
- 定义 `AssistantOrderContributionTask` 类继承 `BaseDwsTask`
|
||||
- 实现 `get_task_code`、`get_target_table`、`get_primary_keys`
|
||||
- _Requirements: 1.1, 2.7_
|
||||
- [x] 2.2 实现四项统计核心计算(纯函数)
|
||||
- 实现 `compute_order_gross_revenue` 静态方法
|
||||
- 实现 `compute_order_net_revenue` 静态方法
|
||||
- 实现 `compute_time_weighted_revenue` 静态方法(含台费分摊、酒水均分逻辑)
|
||||
- 实现 `compute_time_weighted_net_revenue` 静态方法
|
||||
- 处理超休/打赏课特殊情况
|
||||
- _Requirements: 2.2, 2.3, 2.4, 2.5, 2.6_
|
||||
- [x] 2.3 编写属性测试:订单级统计不变量
|
||||
- **Property 1: 订单级统计不变量 — gross/net 各助教相等**
|
||||
- **Validates: Requirements 2.2, 2.3, 10.1, 10.2**
|
||||
- [x] 2.4 编写属性测试:时效贡献流水之和约束
|
||||
- **Property 2: 时效贡献流水之和 ≤ order_gross_revenue**
|
||||
- **Validates: Requirements 2.4, 10.3**
|
||||
- [x] 2.5 编写属性测试:时效净贡献减法关系
|
||||
- **Property 3: time_weighted_net_revenue = time_weighted_revenue - commission**
|
||||
- **Validates: Requirements 2.5, 10.4**
|
||||
- [x] 2.6 实现 `extract` 方法
|
||||
- 从 `dwd_settlement_head`、`dwd_table_fee_log`、`dwd_assistant_service_log` 提取数据
|
||||
- 按 `order_settle_id` 聚合为 `OrderData` 结构
|
||||
- _Requirements: 2.1_
|
||||
- [x] 2.7 实现 `transform` 方法
|
||||
- 遍历订单,调用四项统计计算函数
|
||||
- 按 `(assistant_id, stat_date)` 聚合日度统计
|
||||
- _Requirements: 2.2, 2.3, 2.4, 2.5, 2.6_
|
||||
- [x] 2.8 注册任务到 task_registry 并导出模块
|
||||
- 在 `tasks/dws/__init__.py` 中导出 `AssistantOrderContributionTask`
|
||||
- 在 `orchestration/task_registry.py` 中注册 `DWS_ASSISTANT_ORDER_CONTRIBUTION`,`layer="DWS"`,`depends_on=["DWD_LOAD_FROM_ODS"]`
|
||||
- _Requirements: 2.7, 2.8_
|
||||
|
||||
- [x] 3. 检查点 — 确保助教订单流水统计测试通过
|
||||
- 运行属性测试:`cd C:\NeoZQYY && pytest tests/test_dws_contribution_properties.py -v`
|
||||
- 确保所有属性测试通过,如有问题请询问用户。
|
||||
|
||||
- [x] 4. 扩展会员消费汇总任务
|
||||
- [x] 4.1 在 `MemberConsumptionTask` 中新增充值统计提取
|
||||
- 新增 `_extract_recharge_stats` 方法,从 `dwd.dwd_recharge_order` 按 30/60/90 天窗口聚合
|
||||
- 在 `extract` 方法中调用并返回充值统计数据
|
||||
- _Requirements: 4.1, 3.3_
|
||||
- [x] 4.2 在 `MemberConsumptionTask.transform` 中填充新字段
|
||||
- 合并充值统计到输出记录
|
||||
- 计算 `avg_ticket_amount = total_consume_amount / MAX(total_visit_count, 1)`
|
||||
- 处理无充值/无消费的边界情况
|
||||
- _Requirements: 4.2, 4.3, 4.4, 3.4_
|
||||
- [x] 4.3 编写属性测试:次均消费公式
|
||||
- **Property 5: avg_ticket_amount = total_consume_amount / MAX(total_visit_count, 1)**
|
||||
- **Validates: Requirements 3.4, 10.7**
|
||||
|
||||
- [x] 5. 实现定档折算惩罚检测与计算
|
||||
- [x] 5.1 实现时间重叠检测逻辑
|
||||
- 在 `AssistantDailyTask` 中新增 `detect_overlap_violations` 静态方法
|
||||
- 定义惩罚区域集合(大厅 A/B/C/S/TV + 麻将房 M1–M7)
|
||||
- 按 `(table_id, service_date)` 分组,检测时间段重叠且助教数 > 2
|
||||
- _Requirements: 6.1_
|
||||
- [x] 5.2 实现惩罚分钟数计算
|
||||
- 在 `AssistantDailyTask` 中新增 `compute_penalty_minutes` 静态方法
|
||||
- 计算 `per_hour_contribution = 台费每小时单价 / 助教人数`
|
||||
- 按分段公式计算 `penalty_minutes`
|
||||
- 处理 `is_exempt = TRUE` 豁免逻辑
|
||||
- _Requirements: 6.2, 6.3, 6.4, 6.5_
|
||||
- [x] 5.3 集成惩罚逻辑到 `AssistantDailyTask.transform`
|
||||
- 在现有聚合逻辑后调用重叠检测和惩罚计算
|
||||
- 将 `penalty_minutes`、`penalty_reason`、`is_exempt`、`per_hour_contribution` 填充到输出记录
|
||||
- _Requirements: 6.6, 6.7_
|
||||
- [x] 5.4 编写属性测试:惩罚分钟数分段公式
|
||||
- **Property 4: 惩罚分钟数符合分段公式且在 [0, actual_minutes] 范围内**
|
||||
- **Validates: Requirements 6.3, 6.4, 10.5, 10.6**
|
||||
- [x] 5.5 编写属性测试:重叠检测正确性
|
||||
- **Property 6: 3+ 助教时间重叠时应检测到违规**
|
||||
- **Validates: Requirements 6.1**
|
||||
|
||||
- [x] 6. 检查点 — 确保惩罚计算和消费汇总测试通过
|
||||
- 运行属性测试:`cd C:\NeoZQYY && pytest tests/test_dws_contribution_properties.py -v`
|
||||
- 运行单元测试:`cd apps/etl/connectors/feiqiu && pytest tests/unit/ -k "contribution or penalty or consumption" -v`
|
||||
- 确保所有测试通过,如有问题请询问用户。
|
||||
|
||||
- [x] 7. RLS 视图与 FDW 映射
|
||||
- [x] 7.1 创建 `dws_assistant_order_contribution` 的 RLS 视图
|
||||
- 在测试库 `test_etl_feiqiu` 的 `app` schema 中创建 `v_dws_assistant_order_contribution` 视图
|
||||
- 使用 `current_setting('app.current_site_id')::bigint` 过滤
|
||||
- 授予 `app_reader` 角色 SELECT 权限
|
||||
- _Requirements: 7.1, 7.3_
|
||||
- [x] 7.2 验证已有 RLS 视图自动包含新增字段
|
||||
- 确认 `app.v_dws_member_consumption_summary` 和 `app.v_dws_assistant_daily_detail` 使用 `SELECT *`,新增字段自动包含
|
||||
- _Requirements: 7.2_
|
||||
- [x] 7.3 创建/更新 FDW 外部表映射
|
||||
- 在测试库 `test_zqyy_app` 的 `fdw_etl` schema 中创建 `dws_assistant_order_contribution` 外部表
|
||||
- 重建 `dws_member_consumption_summary` 和 `dws_assistant_daily_detail` 的 FDW 外部表以包含新字段
|
||||
- FDW 映射通过 `app` schema RLS 视图访问
|
||||
- _Requirements: 8.1, 8.2, 8.3_
|
||||
|
||||
- [x] 8. 影子跑数验证
|
||||
- [x] 8.1 编写验证脚本
|
||||
- 新建 `apps/etl/connectors/feiqiu/scripts/verify_dws_extensions.py`
|
||||
- 验证四项统计:对照 PRD 示例数据验算,检查 gross/net 各助教相等
|
||||
- 验证充值窗口:检查新增字段有值,充值金额与源数据一致
|
||||
- 验证惩罚字段:检查符合条件的记录正确填充
|
||||
- _Requirements: 9.1, 9.2, 9.3, 9.4_
|
||||
- [x] 8.2 编写数据库手册文档
|
||||
- 新建 `docs/database/BD_Manual_dws_assistant_order_contribution.md`
|
||||
- 包含表结构、字段说明、索引、验证 SQL(至少 3 条)、兼容性说明、回滚策略
|
||||
- 更新 `docs/database/` 中 `dws_member_consumption_summary` 和 `dws_assistant_daily_detail` 的文档
|
||||
- _Requirements: 1.1_
|
||||
|
||||
- [x] 9. 最终检查点 — 确保所有测试通过
|
||||
- 运行属性测试:`cd C:\NeoZQYY && pytest tests/test_dws_contribution_properties.py -v`
|
||||
- 运行单元测试:`cd apps/etl/connectors/feiqiu && pytest tests/unit/ -k "contribution or penalty or consumption" -v`
|
||||
- 确保所有测试通过,如有问题请询问用户。
|
||||
|
||||
## 备注
|
||||
|
||||
- 标记 `*` 的子任务为可选(属性测试),可跳过以加速 MVP;当前清单未标记任何可选子任务,所有属性测试均按必做执行
|
||||
- 每个任务引用具体需求编号以确保可追溯
|
||||
- 所有数据库操作在测试库(`test_etl_feiqiu` / `test_zqyy_app`)中进行
|
||||
- 检查点确保增量验证
|
||||
- 属性测试验证全称正确性属性,单元测试验证具体示例和边界情况
|
||||
1
.kiro/specs/03-miniapp-auth-system/.config.kiro
Normal file
1
.kiro/specs/03-miniapp-auth-system/.config.kiro
Normal file
@@ -0,0 +1 @@
|
||||
{"generationMode": "requirements-first"}
|
||||
859
.kiro/specs/03-miniapp-auth-system/design.md
Normal file
859
.kiro/specs/03-miniapp-auth-system/design.md
Normal file
@@ -0,0 +1,859 @@
|
||||
# 设计文档:小程序用户认证系统(miniapp-auth-system)
|
||||
|
||||
## 概述
|
||||
|
||||
本设计在 P1(miniapp-db-foundation)已建立的 `auth` Schema 基础上,实现完整的小程序用户认证链路:
|
||||
|
||||
1. **微信登录**:小程序端发送 `code` → 后端调用微信 `code2Session` → 获取 `openid` → 创建/查找用户 → 签发 JWT
|
||||
2. **用户申请**:新用户填写球房ID + 手机号 + 申请身份 → 系统自动匹配助教/员工 → 管理员审核
|
||||
3. **权限控制**:基于 `user_site_roles` + `role_permissions` 的 RBAC 模型,权限中间件拦截无权请求
|
||||
4. **多店铺支持**:一个用户可关联多个 `site_id`,切换店铺时重新签发 JWT
|
||||
|
||||
**环境变量依赖**:
|
||||
|
||||
| 环境变量 | 用途 | 来源 |
|
||||
|---------|------|------|
|
||||
| `APP_DB_DSN` / `DB_HOST` 等 | 业务库连接 | 根 `.env` |
|
||||
| `PG_DSN` / `ETL_DB_HOST` 等 | ETL 库连接(FDW 匹配) | 根 `.env` |
|
||||
| `JWT_SECRET_KEY` | JWT 签名密钥 | `.env.local` |
|
||||
| `WX_APPID` | 微信小程序 AppID | `.env.local` |
|
||||
| `WX_SECRET` | 微信小程序 AppSecret | `.env.local` |
|
||||
|
||||
**整体认证流程**:
|
||||
|
||||
```
|
||||
小程序端 FastAPI 后端 微信服务器
|
||||
│ │ │
|
||||
│── wx.login() ──► │ │
|
||||
│ 获取 code │ │
|
||||
│ │ │
|
||||
│── POST /api/xcx/login ──► │ │
|
||||
│ {code} │── GET code2Session ──────────► │
|
||||
│ │◄── {openid, session_key} ──── │
|
||||
│ │ │
|
||||
│ │── 查找/创建 auth.users ──► │
|
||||
│ │── 签发 JWT ──► │
|
||||
│◄── {jwt, status} ─────── │ │
|
||||
│ │ │
|
||||
│ [status=pending] │ │
|
||||
│── POST /api/xcx/apply ──► │ │
|
||||
│ {site_code, phone, ...} │── 创建 user_applications ──► │
|
||||
│◄── {application_id} ───── │ │
|
||||
```
|
||||
|
||||
## 架构
|
||||
|
||||
### 分层架构
|
||||
|
||||
```mermaid
|
||||
graph TB
|
||||
subgraph "小程序端"
|
||||
MP["微信小程序<br/>wx.login() / wx.request()"]
|
||||
end
|
||||
|
||||
subgraph "FastAPI 后端(apps/backend/)"
|
||||
subgraph "路由层"
|
||||
XCX_AUTH["routers/xcx_auth.py<br/>微信登录 + 申请"]
|
||||
XCX_USER["routers/xcx_user.py<br/>用户状态 + 店铺切换"]
|
||||
ADMIN_APP["routers/admin_applications.py<br/>管理端审核"]
|
||||
end
|
||||
|
||||
subgraph "中间件层"
|
||||
PERM_MW["middleware/permission.py<br/>权限中间件"]
|
||||
end
|
||||
|
||||
subgraph "服务层"
|
||||
WX_SVC["services/wechat.py<br/>code2Session 调用"]
|
||||
APP_SVC["services/application.py<br/>申请 CRUD + 审核"]
|
||||
MATCH_SVC["services/matching.py<br/>人员匹配"]
|
||||
ROLE_SVC["services/role.py<br/>角色权限查询"]
|
||||
end
|
||||
|
||||
subgraph "认证层(已有 + 扩展)"
|
||||
JWT["auth/jwt.py<br/>JWT 签发/验证(扩展)"]
|
||||
DEPS["auth/dependencies.py<br/>依赖注入(扩展)"]
|
||||
end
|
||||
|
||||
DB["database.py<br/>数据库连接"]
|
||||
end
|
||||
|
||||
subgraph "数据库"
|
||||
AUTH_SCHEMA["auth Schema<br/>users / applications / roles / ..."]
|
||||
FDW_ETL["fdw_etl Schema<br/>v_dim_assistant / v_dim_staff"]
|
||||
end
|
||||
|
||||
subgraph "外部服务"
|
||||
WX_API["微信 API<br/>code2Session"]
|
||||
end
|
||||
|
||||
MP --> XCX_AUTH
|
||||
MP --> XCX_USER
|
||||
XCX_AUTH --> PERM_MW
|
||||
XCX_USER --> PERM_MW
|
||||
ADMIN_APP --> PERM_MW
|
||||
PERM_MW --> JWT
|
||||
PERM_MW --> DEPS
|
||||
XCX_AUTH --> WX_SVC
|
||||
XCX_AUTH --> APP_SVC
|
||||
ADMIN_APP --> APP_SVC
|
||||
ADMIN_APP --> MATCH_SVC
|
||||
XCX_USER --> ROLE_SVC
|
||||
WX_SVC --> WX_API
|
||||
APP_SVC --> DB
|
||||
MATCH_SVC --> DB
|
||||
ROLE_SVC --> DB
|
||||
DB --> AUTH_SCHEMA
|
||||
DB --> FDW_ETL
|
||||
```
|
||||
|
||||
### 请求处理流程
|
||||
|
||||
```mermaid
|
||||
sequenceDiagram
|
||||
participant MP as 小程序
|
||||
participant MW as Permission Middleware
|
||||
participant R as Router
|
||||
participant S as Service
|
||||
participant DB as PostgreSQL
|
||||
|
||||
MP->>R: POST /api/xcx/login {code}
|
||||
R->>S: wechat.code2session(code)
|
||||
S-->>R: openid
|
||||
R->>DB: SELECT FROM auth.users WHERE wx_openid = ?
|
||||
alt 用户不存在
|
||||
R->>DB: INSERT INTO auth.users
|
||||
end
|
||||
R->>R: jwt.create_token_pair(user_id, site_id)
|
||||
R-->>MP: {access_token, refresh_token, status}
|
||||
|
||||
Note over MP,DB: 后续请求(需认证)
|
||||
|
||||
MP->>MW: GET /api/xcx/... (Bearer token)
|
||||
MW->>MW: decode_access_token(token)
|
||||
MW->>DB: SELECT permissions FROM auth.user_site_roles JOIN ...
|
||||
alt 权限不足
|
||||
MW-->>MP: 403 Forbidden
|
||||
else 权限通过
|
||||
MW->>R: 放行
|
||||
R->>S: 业务逻辑
|
||||
S->>DB: 数据操作
|
||||
R-->>MP: 200 OK
|
||||
end
|
||||
```
|
||||
|
||||
## 组件与接口
|
||||
|
||||
### 组件 1:微信认证服务(services/wechat.py)
|
||||
|
||||
**职责**:封装微信 `code2Session` API 调用。
|
||||
|
||||
```python
|
||||
import httpx
|
||||
from app.config import get
|
||||
|
||||
WX_APPID: str = get("WX_APPID", "")
|
||||
WX_SECRET: str = get("WX_SECRET", "")
|
||||
CODE2SESSION_URL = "https://api.weixin.qq.com/sns/jscode2session"
|
||||
|
||||
async def code2session(code: str) -> dict:
|
||||
"""
|
||||
调用微信 code2Session 接口。
|
||||
|
||||
返回:
|
||||
{"openid": str, "session_key": str, "unionid": str | None}
|
||||
|
||||
异常:
|
||||
WeChatAuthError: 微信接口返回错误码时抛出
|
||||
"""
|
||||
...
|
||||
|
||||
class WeChatAuthError(Exception):
|
||||
"""微信认证错误,包含 errcode 和 errmsg。"""
|
||||
def __init__(self, errcode: int, errmsg: str): ...
|
||||
```
|
||||
|
||||
**设计决策**:
|
||||
- 使用 `httpx.AsyncClient` 异步调用微信 API,与 FastAPI 异步模型一致
|
||||
- `WX_APPID` / `WX_SECRET` 从环境变量读取,缺失时在调用时报错(而非启动时,因为非所有端点都需要微信认证)
|
||||
|
||||
### 组件 2:申请服务(services/application.py)
|
||||
|
||||
**职责**:处理用户申请的创建、查询、审核。
|
||||
|
||||
```python
|
||||
async def create_application(
|
||||
user_id: int,
|
||||
site_code: str,
|
||||
applied_role_text: str,
|
||||
phone: str,
|
||||
employee_number: str | None = None,
|
||||
nickname: str | None = None,
|
||||
) -> dict:
|
||||
"""
|
||||
创建用户申请。
|
||||
|
||||
1. 查找 site_code → site_id 映射
|
||||
2. 检查是否有 pending 申请(有则 409)
|
||||
3. 插入 user_applications 记录
|
||||
4. 更新 users.nickname(如提供)
|
||||
"""
|
||||
...
|
||||
|
||||
async def approve_application(
|
||||
application_id: int,
|
||||
reviewer_id: int,
|
||||
role_id: int,
|
||||
binding: dict | None = None, # {"assistant_id": ..., "staff_id": ..., "binding_type": ...}
|
||||
review_note: str | None = None,
|
||||
) -> dict:
|
||||
"""
|
||||
批准申请。
|
||||
|
||||
1. 检查申请状态为 pending(否则 409)
|
||||
2. 更新 user_applications.status = 'approved'
|
||||
3. 创建 user_site_roles 记录
|
||||
4. 创建 user_assistant_binding 记录(如有 binding)
|
||||
5. 更新 users.status = 'approved'(如果是首次通过)
|
||||
6. 记录 reviewer_id 和 reviewed_at
|
||||
"""
|
||||
...
|
||||
|
||||
async def reject_application(
|
||||
application_id: int,
|
||||
reviewer_id: int,
|
||||
review_note: str,
|
||||
) -> dict:
|
||||
"""
|
||||
拒绝申请。
|
||||
|
||||
1. 检查申请状态为 pending(否则 409)
|
||||
2. 更新 user_applications.status = 'rejected'
|
||||
3. 记录 reviewer_id、review_note、reviewed_at
|
||||
"""
|
||||
...
|
||||
|
||||
async def get_user_applications(user_id: int) -> list[dict]:
|
||||
"""查询用户的所有申请记录。"""
|
||||
...
|
||||
```
|
||||
|
||||
### 组件 3:人员匹配服务(services/matching.py)
|
||||
|
||||
**职责**:根据申请信息在 FDW 外部表中查找候选匹配。
|
||||
|
||||
```python
|
||||
async def find_candidates(
|
||||
site_id: int,
|
||||
phone: str,
|
||||
employee_number: str | None = None,
|
||||
) -> list[dict]:
|
||||
"""
|
||||
在助教表和员工表中查找匹配候选。
|
||||
|
||||
查询逻辑:
|
||||
1. fdw_etl.v_dim_assistant: WHERE site_id = ? AND mobile = ?
|
||||
2. fdw_etl.v_dim_staff + v_dim_staff_ex: WHERE site_id = ? AND (mobile = ? OR job_num = ?)
|
||||
3. 合并结果,每条包含 source_type / name / mobile / job_num
|
||||
|
||||
    注意:fdw_etl 外部表映射的是 ETL 库 app schema 的 RLS 视图,
|
||||
    RLS 过滤依赖“远端会话”的 app.current_site_id 变量。
|
||||
    postgres_fdw 不会自动把本地 session 变量透传到远端,
|
||||
    因此需通过外部服务器/用户映射的 options 或在远端会话中
|
||||
    显式设置该变量(TODO:与 DBA 确认具体透传方案)。
|
||||
|
||||
返回:
|
||||
[{"source_type": "assistant"|"staff", "id": int, "name": str, "mobile": str, "job_num": str | None}]
|
||||
"""
|
||||
...
|
||||
```
|
||||
|
||||
**设计决策**:
|
||||
- FDW 外部表映射的是 ETL 库 `app` Schema 的 RLS 视图,RLS 依赖远端会话的 `app.current_site_id`;注意 postgres_fdw 不会自动透传本地 `SET LOCAL` 自定义变量,需确认透传机制(如外部服务器 options)后方可依赖 RLS 隔离
|
||||
- 匹配查询使用业务库连接(`get_connection()`),并确保远端会话已正确设置 `app.current_site_id` 实现隔离(方案待验证)
|
||||
- 如果 `site_code` 无法映射到 `site_id`,直接返回空列表
|
||||
|
||||
### 组件 4:角色权限服务(services/role.py)
|
||||
|
||||
**职责**:查询用户在指定店铺下的角色和权限。
|
||||
|
||||
```python
|
||||
async def get_user_permissions(user_id: int, site_id: int) -> list[str]:
|
||||
"""
|
||||
获取用户在指定 site_id 下的权限 code 列表。
|
||||
|
||||
SQL: SELECT DISTINCT p.code
|
||||
FROM auth.user_site_roles usr
|
||||
JOIN auth.role_permissions rp ON usr.role_id = rp.role_id
|
||||
JOIN auth.permissions p ON rp.permission_id = p.id
|
||||
WHERE usr.user_id = ? AND usr.site_id = ?
|
||||
"""
|
||||
...
|
||||
|
||||
async def get_user_sites(user_id: int) -> list[dict]:
|
||||
"""
|
||||
获取用户关联的所有店铺及对应角色。
|
||||
|
||||
返回: [{"site_id": int, "site_name": str, "roles": [{"code": str, "name": str}]}]
|
||||
"""
|
||||
...
|
||||
|
||||
async def check_user_has_site_role(user_id: int, site_id: int) -> bool:
|
||||
"""检查用户在指定 site_id 下是否有任何角色绑定。"""
|
||||
...
|
||||
```
|
||||
|
||||
### 组件 5:权限中间件(middleware/permission.py)
|
||||
|
||||
**职责**:基于 JWT 中的 `user_id` + `site_id` 检查用户权限。
|
||||
|
||||
```python
|
||||
from functools import wraps
|
||||
from fastapi import Depends, HTTPException, status
|
||||
from app.auth.dependencies import get_current_user, CurrentUser
|
||||
|
||||
def require_permission(*permission_codes: str):
|
||||
"""
|
||||
权限装饰器/依赖,用于路由端点。
|
||||
|
||||
用法:
|
||||
@router.get("/finance")
|
||||
async def get_finance(
|
||||
user: CurrentUser = Depends(require_permission("view_board_finance"))
|
||||
):
|
||||
...
|
||||
|
||||
逻辑:
|
||||
1. 从 JWT 提取 user_id + site_id
|
||||
2. 查询 auth.users.status,非 approved 则 403
|
||||
3. 查询 user_site_roles + role_permissions 获取权限列表
|
||||
4. 检查是否包含所需权限,不包含则 403
|
||||
"""
|
||||
...
|
||||
|
||||
def require_approved():
|
||||
"""
|
||||
仅检查用户状态为 approved 的依赖(不检查具体权限)。
|
||||
用于通用的已认证端点。
|
||||
"""
|
||||
...
|
||||
```
|
||||
|
||||
**设计决策**:
|
||||
- 使用 FastAPI 依赖注入模式而非全局中间件,更灵活且可按端点配置
|
||||
- `pending` 用户只能访问申请提交和状态查询端点,其他端点需要 `approved` 状态
|
||||
- 权限检查结果可考虑短期缓存(当前版本不缓存,每次查库)
|
||||
|
||||
### 组件 6:JWT 服务扩展(auth/jwt.py 扩展)
|
||||
|
||||
**职责**:扩展现有 JWT 服务,支持微信登录场景。
|
||||
|
||||
**扩展内容**:
|
||||
|
||||
```python
|
||||
# 新增:创建受限令牌(pending 用户)
|
||||
def create_limited_token_pair(user_id: int) -> dict[str, str]:
|
||||
"""
|
||||
为 pending 用户签发受限令牌。
|
||||
payload 不含 site_id 和 roles,仅包含 user_id + type + limited=True。
|
||||
"""
|
||||
...
|
||||
|
||||
# 扩展:create_access_token payload 增加 roles 字段
|
||||
def create_access_token(user_id: int, site_id: int, roles: list[str] | None = None) -> str:
|
||||
"""
|
||||
生成 access_token。
|
||||
payload: sub=user_id, site_id, roles, type=access, exp
|
||||
"""
|
||||
...
|
||||
```
|
||||
|
||||
**设计决策**:
|
||||
- 保持向后兼容:现有 `create_access_token(user_id, site_id)` 调用不受影响(`roles` 默认 `None`)
|
||||
- `pending` 用户的受限令牌通过 `limited=True` 标记区分,权限中间件据此拦截
|
||||
|
||||
### 组件 7:路由端点
|
||||
|
||||
#### 7.1 小程序认证路由(routers/xcx_auth.py)
|
||||
|
||||
| 方法 | 路径 | 说明 | 认证要求 |
|
||||
|------|------|------|---------|
|
||||
| POST | `/api/xcx/login` | 微信登录 | 无(公开) |
|
||||
| POST | `/api/xcx/apply` | 提交申请 | JWT(含 pending) |
|
||||
| GET | `/api/xcx/me` | 查询自身状态 | JWT(含 pending) |
|
||||
| GET | `/api/xcx/me/sites` | 查询关联店铺 | JWT(approved) |
|
||||
| POST | `/api/xcx/switch-site` | 切换店铺 | JWT(approved) |
|
||||
| POST | `/api/xcx/refresh` | 刷新令牌 | refresh_token |
|
||||
|
||||
#### 7.2 管理端审核路由(routers/admin_applications.py)
|
||||
|
||||
| 方法 | 路径 | 说明 | 认证要求 |
|
||||
|------|------|------|---------|
|
||||
| GET | `/api/admin/applications` | 查询申请列表 | JWT + site_admin/tenant_admin |
|
||||
| GET | `/api/admin/applications/{id}` | 查询申请详情 + 候选匹配 | JWT + site_admin/tenant_admin |
|
||||
| POST | `/api/admin/applications/{id}/approve` | 批准申请 | JWT + site_admin/tenant_admin |
|
||||
| POST | `/api/admin/applications/{id}/reject` | 拒绝申请 | JWT + site_admin/tenant_admin |
|
||||
|
||||
### 组件 8:Pydantic 模型(schemas/xcx_auth.py)
|
||||
|
||||
```python
|
||||
from pydantic import BaseModel, Field
|
||||
import re
|
||||
|
||||
class WxLoginRequest(BaseModel):
|
||||
code: str = Field(..., min_length=1, description="微信临时登录凭证")
|
||||
|
||||
class WxLoginResponse(BaseModel):
|
||||
access_token: str
|
||||
refresh_token: str
|
||||
token_type: str = "bearer"
|
||||
user_status: str # pending / approved / rejected / disabled
|
||||
user_id: int
|
||||
|
||||
class ApplicationRequest(BaseModel):
|
||||
site_code: str = Field(..., pattern=r"^[A-Za-z]{2}\d{3}$", description="球房ID")
|
||||
applied_role_text: str = Field(..., min_length=1, max_length=100)
|
||||
phone: str = Field(..., pattern=r"^\d{11}$", description="手机号")
|
||||
employee_number: str | None = Field(None, max_length=50)
|
||||
nickname: str | None = Field(None, max_length=50)
|
||||
|
||||
class ApplicationResponse(BaseModel):
|
||||
id: int
|
||||
site_code: str
|
||||
applied_role_text: str
|
||||
status: str
|
||||
review_note: str | None = None
|
||||
created_at: str
|
||||
reviewed_at: str | None = None
|
||||
|
||||
class UserStatusResponse(BaseModel):
|
||||
user_id: int
|
||||
status: str
|
||||
nickname: str | None
|
||||
applications: list[ApplicationResponse]
|
||||
|
||||
class SiteInfo(BaseModel):
|
||||
site_id: int
|
||||
site_name: str
|
||||
roles: list[dict]
|
||||
|
||||
class SwitchSiteRequest(BaseModel):
|
||||
site_id: int
|
||||
|
||||
class MatchCandidate(BaseModel):
|
||||
source_type: str # assistant / staff
|
||||
id: int
|
||||
name: str
|
||||
mobile: str | None
|
||||
job_num: str | None
|
||||
|
||||
class ApproveRequest(BaseModel):
|
||||
role_id: int
|
||||
binding: dict | None = None # {"assistant_id": ..., "staff_id": ..., "binding_type": ...}
|
||||
review_note: str | None = None
|
||||
|
||||
class RejectRequest(BaseModel):
|
||||
review_note: str = Field(..., min_length=1)
|
||||
```
|
||||
|
||||
## 数据模型
|
||||
|
||||
### ER 图
|
||||
|
||||
```mermaid
|
||||
erDiagram
|
||||
users {
|
||||
serial id PK
|
||||
varchar wx_openid UK
|
||||
varchar wx_union_id
|
||||
varchar wx_avatar_url
|
||||
varchar nickname
|
||||
varchar phone
|
||||
varchar status "pending/approved/rejected/disabled"
|
||||
timestamptz created_at
|
||||
timestamptz updated_at
|
||||
}
|
||||
|
||||
user_applications {
|
||||
serial id PK
|
||||
int user_id FK
|
||||
varchar site_code
|
||||
int site_id "可空,映射后填入"
|
||||
varchar applied_role_text
|
||||
varchar employee_number "可选"
|
||||
varchar phone
|
||||
varchar status "pending/approved/rejected"
|
||||
int reviewer_id
|
||||
text review_note
|
||||
timestamptz created_at
|
||||
timestamptz reviewed_at
|
||||
}
|
||||
|
||||
site_code_mapping {
|
||||
serial id PK
|
||||
varchar site_code UK "2字母+3数字"
|
||||
bigint site_id UK
|
||||
varchar site_name
|
||||
int tenant_id
|
||||
timestamptz created_at
|
||||
}
|
||||
|
||||
roles {
|
||||
serial id PK
|
||||
varchar code UK
|
||||
varchar name
|
||||
text description
|
||||
timestamptz created_at
|
||||
}
|
||||
|
||||
permissions {
|
||||
serial id PK
|
||||
varchar code UK
|
||||
varchar name
|
||||
text description
|
||||
timestamptz created_at
|
||||
}
|
||||
|
||||
role_permissions {
|
||||
int role_id FK
|
||||
int permission_id FK
|
||||
}
|
||||
|
||||
user_site_roles {
|
||||
serial id PK
|
||||
int user_id FK
|
||||
bigint site_id
|
||||
int role_id FK
|
||||
timestamptz created_at
|
||||
}
|
||||
|
||||
user_assistant_binding {
|
||||
serial id PK
|
||||
int user_id FK
|
||||
bigint site_id
|
||||
bigint assistant_id "可空"
|
||||
bigint staff_id "可空"
|
||||
varchar binding_type "assistant/staff/manager"
|
||||
timestamptz created_at
|
||||
}
|
||||
|
||||
users ||--o{ user_applications : "提交申请"
|
||||
users ||--o{ user_site_roles : "店铺角色"
|
||||
users ||--o{ user_assistant_binding : "人员绑定"
|
||||
roles ||--o{ user_site_roles : "角色分配"
|
||||
roles ||--o{ role_permissions : "角色权限"
|
||||
permissions ||--o{ role_permissions : "权限定义"
|
||||
site_code_mapping ||--o{ user_applications : "球房映射"
|
||||
```
|
||||
|
||||
### 表 DDL 概要
|
||||
|
||||
所有表在 `auth` Schema 下,迁移脚本位于 `db/zqyy_app/migrations/`。
|
||||
|
||||
**关键约束**:
|
||||
- `users.wx_openid` UNIQUE — 一个微信用户对应一条记录
|
||||
- `site_code_mapping.site_code` UNIQUE — 球房ID 唯一
|
||||
- `site_code_mapping.site_id` UNIQUE — site_id 唯一映射
|
||||
- `user_site_roles (user_id, site_id, role_id)` UNIQUE — 防止重复分配
|
||||
- `role_permissions (role_id, permission_id)` 联合主键
|
||||
|
||||
**索引**:
|
||||
- `users`: `ix_users_wx_openid` (wx_openid)
|
||||
- `user_applications`: `ix_user_applications_user_id` (user_id), `ix_user_applications_status` (status)
|
||||
- `user_site_roles`: `ix_user_site_roles_user_site` (user_id, site_id)
|
||||
- `site_code_mapping`: `ix_site_code_mapping_site_code` (site_code)
|
||||
|
||||
### 迁移脚本清单
|
||||
|
||||
| 序号 | 文件名 | 内容 |
|
||||
|------|--------|------|
|
||||
| 1 | `YYYY-MM-DD__p3_create_auth_tables.sql` | 创建 users / user_applications / site_code_mapping / roles / permissions / role_permissions / user_site_roles / user_assistant_binding |
|
||||
| 2 | `YYYY-MM-DD__p3_seed_roles_permissions.sql` | 种子数据:权限列表 + 默认角色 + 角色权限映射 |
|
||||
|
||||
|
||||
|
||||
## 正确性属性(Correctness Properties)
|
||||
|
||||
*属性是系统在所有有效执行中都应保持为真的特征或行为——本质上是关于系统应该做什么的形式化陈述。属性是人类可读规格与机器可验证正确性保证之间的桥梁。*
|
||||
|
||||
### Property 1:迁移脚本幂等性
|
||||
|
||||
*For any* 本次新增的迁移脚本(DDL + 种子数据),连续执行两次的结果应与执行一次相同——第二次执行不应产生错误,且数据库状态不变。
|
||||
|
||||
**Validates: Requirements 1.9, 2.4, 11.5**
|
||||
|
||||
### Property 2:登录创建/查找用户正确性
|
||||
|
||||
*For any* 有效的微信 `openid`,调用登录逻辑后:若该 `openid` 已存在于 `auth.users` 中,应返回已有用户的 `user_id`;若不存在,应创建新用户(status=`pending`)并返回新 `user_id`。无论哪种情况,返回的 JWT 中 `sub` 应等于该 `user_id`。
|
||||
|
||||
**Validates: Requirements 3.2, 3.3**
|
||||
|
||||
### Property 3:disabled 用户登录拒绝
|
||||
|
||||
*For any* `auth.users` 中 status 为 `disabled` 的用户,通过其 `openid` 登录时应返回 403 错误,不签发 JWT。
|
||||
|
||||
**Validates: Requirements 3.5**
|
||||
|
||||
### Property 4:申请创建正确性
|
||||
|
||||
*For any* 有效的申请数据(合法 `site_code` 格式、11 位手机号、非空 `applied_role_text`),提交申请后 `auth.user_applications` 中应新增一条 status=`pending` 的记录。若 `site_code` 在 `site_code_mapping` 中有映射,记录的 `site_id` 应等于映射值;若无映射,`site_id` 为 NULL 但申请仍成功。若提供了 `nickname`,`auth.users` 中该用户的 `nickname` 应更新。
|
||||
|
||||
**Validates: Requirements 4.1, 4.2, 4.3, 4.4**
|
||||
|
||||
### Property 5:手机号格式验证
|
||||
|
||||
*For any* 非 11 位纯数字的字符串作为 `phone` 提交申请,系统应返回 422 错误,`auth.user_applications` 中不应新增记录。
|
||||
|
||||
**Validates: Requirements 4.5**
|
||||
|
||||
### Property 6:重复申请拒绝
|
||||
|
||||
*For any* 已有一条 status=`pending` 申请的用户,再次提交申请时应返回 409 错误,`auth.user_applications` 中不应新增记录。
|
||||
|
||||
**Validates: Requirements 4.6**
|
||||
|
||||
### Property 7:人员匹配合并正确性
|
||||
|
||||
*For any* 有效的 `site_id` 和 `phone` 组合,匹配服务返回的候选列表应满足:(1) 每条候选的 `source_type` 为 `assistant` 或 `staff`;(2) 助教来源的候选来自 `v_dim_assistant` 表中 `site_id` 和 `mobile` 匹配的记录;(3) 员工来源的候选来自 `v_dim_staff` 表中 `site_id` 和 `mobile`(或 `job_num`)匹配的记录;(4) 列表是两个来源结果的并集,无遗漏。
|
||||
|
||||
**Validates: Requirements 5.1, 5.2, 5.3, 5.4**
|
||||
|
||||
### Property 8:审核操作正确性
|
||||
|
||||
*For any* status=`pending` 的申请:(1) 批准操作后,申请 status 变为 `approved`,`auth.user_site_roles` 中新增角色记录,`auth.users.status` 变为 `approved`,`reviewer_id` 和 `reviewed_at` 非空;(2) 若提供了 binding 信息,`auth.user_assistant_binding` 中新增绑定记录;(3) 拒绝操作后,申请 status 变为 `rejected`,`review_note` 非空,`reviewer_id` 和 `reviewed_at` 非空。
|
||||
|
||||
**Validates: Requirements 6.1, 6.2, 6.3, 6.4, 6.5**
|
||||
|
||||
### Property 9:非 pending 申请审核拒绝
|
||||
|
||||
*For any* status 不是 `pending` 的申请(`approved` / `rejected`),对其执行批准或拒绝操作应返回 409 错误,申请状态不变。
|
||||
|
||||
**Validates: Requirements 6.6**
|
||||
|
||||
### Property 10:用户状态查询完整性
|
||||
|
||||
*For any* 用户,查询自身状态应返回:(1) 用户的 `status` 字段;(2) 该用户所有申请记录的完整列表。若用户 status 为 `approved`,还应返回已关联的店铺列表和对应角色。
|
||||
|
||||
**Validates: Requirements 7.1, 7.2**
|
||||
|
||||
### Property 11:多店铺角色独立分配
|
||||
|
||||
*For any* 用户和多个不同的 `site_id`,系统应允许为该用户在每个 `site_id` 下独立分配不同的角色,且 `auth.user_site_roles` 中的记录互不干扰。
|
||||
|
||||
**Validates: Requirements 8.1**
|
||||
|
||||
### Property 12:店铺切换令牌正确性
|
||||
|
||||
*For any* 拥有多店铺绑定的 approved 用户,切换到目标 `site_id` 后签发的新 JWT 中 `site_id` 应等于目标值,`roles` 应等于该用户在目标 `site_id` 下的角色列表。若用户在目标 `site_id` 下无角色绑定,切换应失败。
|
||||
|
||||
**Validates: Requirements 8.2, 10.4**
|
||||
|
||||
### Property 13:权限中间件拦截正确性
|
||||
|
||||
*For any* 用户、`site_id` 和所需权限 code 的组合:(1) 若用户 status 非 `approved`,返回 403;(2) 若用户在该 `site_id` 下的权限列表不包含所需权限,返回 403;(3) 若用户在该 `site_id` 下拥有所需权限且 status 为 `approved`,放行。
|
||||
|
||||
**Validates: Requirements 8.3, 9.1, 9.2, 9.3**
|
||||
|
||||
### Property 14:JWT payload 结构与状态一致性
|
||||
|
||||
*For any* 通过登录签发的 JWT:(1) 解码后应包含 `sub`(user_id)、`type`、`exp` 字段;(2) 若用户 status 为 `approved`,payload 应包含 `site_id` 和 `roles`;(3) 若用户 status 为 `pending`,payload 应包含 `limited=True`,不含 `site_id` 和 `roles`。
|
||||
|
||||
**Validates: Requirements 10.1, 10.2, 10.3**
|
||||
|
||||
### Property 15:JWT 过期/无效令牌拒绝
|
||||
|
||||
*For any* 过期的 JWT 或被篡改的 JWT 字符串,权限中间件应返回 401 错误,不放行请求。
|
||||
|
||||
**Validates: Requirements 9.4**
|
||||
|
||||
## 错误处理
|
||||
|
||||
### API 错误码规范
|
||||
|
||||
| HTTP 状态码 | 场景 | 响应体 |
|
||||
|------------|------|--------|
|
||||
| 401 | JWT 无效/过期、微信 code2Session 失败 | `{"detail": "具体错误描述"}` |
|
||||
| 403 | 用户 disabled、权限不足、用户未 approved | `{"detail": "具体错误描述"}` |
|
||||
| 404 | 申请不存在 | `{"detail": "申请不存在"}` |
|
||||
| 409 | 重复提交 pending 申请、审核非 pending 申请 | `{"detail": "具体冲突描述"}` |
|
||||
| 422 | 请求体校验失败(手机号格式、site_code 格式等) | Pydantic 标准错误格式 |
|
||||
| 500 | 数据库连接失败、微信 API 超时 | `{"detail": "服务器内部错误"}` |
|
||||
|
||||
### 微信 API 错误处理
|
||||
|
||||
| 微信 errcode | 含义 | 处理方式 |
|
||||
|-------------|------|---------|
|
||||
| 0 | 成功 | 正常流程 |
|
||||
| 40029 | code 无效 | 返回 401,提示"登录凭证无效,请重新登录" |
|
||||
| 45011 | 频率限制 | 返回 429,提示"请求过于频繁" |
|
||||
| 40226 | 高风险用户 | 返回 403,提示"账号存在风险" |
|
||||
| 其他 | 未知错误 | 返回 401,记录日志,提示"微信登录失败" |
|
||||
|
||||
### 数据库错误处理
|
||||
|
||||
| 场景 | 处理方式 |
|
||||
|------|---------|
|
||||
| 连接失败 | 捕获 `psycopg2.OperationalError`,返回 500 |
|
||||
| 唯一约束冲突(wx_openid) | 并发创建时捕获 `UniqueViolation`,改为查询已有记录 |
|
||||
| 外键约束失败 | 返回 422,提示具体的关联数据不存在 |
|
||||
| FDW 查询失败 | 捕获异常,匹配服务返回空列表,记录日志 |
|
||||
|
||||
### 环境变量缺失处理
|
||||
|
||||
| 变量 | 缺失时行为 |
|
||||
|------|-----------|
|
||||
| `WX_APPID` / `WX_SECRET` | 微信登录端点返回 500,日志记录"微信配置缺失" |
|
||||
| `JWT_SECRET_KEY` | 生产环境应用启动时直接失败(空密钥签发/验证 JWT 不安全);仅开发环境允许回退空密钥并输出警告日志 |
|
||||
| `DB_HOST` 等数据库参数 | 数据库连接失败,返回 500 |
|
||||
|
||||
## 测试策略
|
||||
|
||||
### DDL 测试库落库与文档同步
|
||||
|
||||
DDL 变更必须经过以下流程:
|
||||
|
||||
1. **测试库执行**:在 `test_zqyy_app` 中执行迁移脚本,验证无错误
|
||||
2. **幂等性验证**:连续执行两次,第二次无错误
|
||||
3. **数据库手册更新**:创建/更新 `docs/database/BD_Manual_auth_tables.md`,格式参照现有 `BD_Manual_auth_biz_schemas.md`
|
||||
4. **DDL 基线刷新**:运行 `python scripts/ops/gen_consolidated_ddl.py` 重新生成 `docs/database/ddl/zqyy_app__auth.sql`
|
||||
|
||||
### 小程序认证前端页面
|
||||
|
||||
#### 页面清单
|
||||
|
||||
| 页面 | 路径 | 说明 | H5 原型 |
|
||||
|------|------|------|---------|
|
||||
| login | `pages/login/login` | 微信登录页(自动调用 wx.login) | `docs/h5_ui/pages/login.html` |
|
||||
| apply | `pages/apply/apply` | 申请表单页(球房ID + 手机号 + 身份 + 编号 + 昵称) | `docs/h5_ui/pages/apply.html` |
|
||||
| reviewing | `pages/reviewing/reviewing` | 审核等待页(显示状态 + 申请摘要) | `docs/h5_ui/pages/reviewing.html` |
|
||||
| no-permission | `pages/no-permission/no-permission` | 无权限/已禁用页 | `docs/h5_ui/pages/no-permission.html` |
|
||||
|
||||
#### 认证路由流程
|
||||
|
||||
```
|
||||
app.ts onLaunch()
|
||||
│
|
||||
├── wx.login() → 获取 code
|
||||
│
|
||||
├── POST /api/xcx/login {code}
|
||||
│ │
|
||||
│ ├── 返回 user_status = "approved"
|
||||
│ │ └── 跳转主页(task-list 或 home)
|
||||
│ │
|
||||
│ ├── 返回 user_status = "pending"
|
||||
│ │ ├── 查询 /api/xcx/me → 有 pending 申请
|
||||
│ │ │ └── 跳转 reviewing 页面
|
||||
│ │ └── 查询 /api/xcx/me → 无 pending 申请
|
||||
│ │ └── 跳转 apply 页面
|
||||
│ │
|
||||
│ ├── 返回 user_status = "rejected"
|
||||
│ │ └── 跳转 reviewing 页面(显示拒绝原因 + 重新申请按钮)
|
||||
│ │
|
||||
│ └── 返回 403(disabled)
|
||||
│ └── 跳转 no-permission 页面
|
||||
│
|
||||
└── 登录失败(网络错误等)
|
||||
└── 显示错误提示,提供重试按钮
|
||||
```
|
||||
|
||||
#### app.ts 全局状态管理
|
||||
|
||||
```typescript
|
||||
// globalData 扩展
|
||||
interface IAppOption {
|
||||
globalData: {
|
||||
userInfo?: {
|
||||
userId: number;
|
||||
status: string; // pending / approved / rejected / disabled
|
||||
nickname?: string;
|
||||
};
|
||||
token?: string;
|
||||
refreshToken?: string;
|
||||
currentSiteId?: number;
|
||||
sites?: Array<{ siteId: number; siteName: string; roles: string[] }>;
|
||||
};
|
||||
}
|
||||
```
|
||||
|
||||
#### 请求封装(utils/request.ts)
|
||||
|
||||
```typescript
|
||||
/**
|
||||
* 统一请求封装:
|
||||
* 1. 自动附加 Authorization: Bearer <token>
|
||||
* 2. 401 时自动尝试 refresh_token 刷新
|
||||
* 3. 刷新失败时跳转 login 页面
|
||||
*/
|
||||
function request(options: RequestOptions): Promise<any> { ... }
|
||||
```
|
||||
|
||||
### 开发模式联调
|
||||
|
||||
#### Mock 登录端点
|
||||
|
||||
后端在 `WX_DEV_MODE=true` 时注册 `POST /api/xcx/dev-login`:
|
||||
|
||||
```python
|
||||
@router.post("/api/xcx/dev-login")
|
||||
async def dev_login(openid: str, status: str = "approved"):
|
||||
"""
|
||||
开发模式 mock 登录。
|
||||
直接根据 openid 查找/创建用户,跳过微信 code2Session。
|
||||
可通过 status 参数模拟不同用户状态。
|
||||
仅在 WX_DEV_MODE=true 时可用。
|
||||
"""
|
||||
...
|
||||
```
|
||||
|
||||
#### 微信开发者工具联调步骤
|
||||
|
||||
联调指南文档位于 `apps/miniprogram/doc/auth-integration-guide.md`,包含:
|
||||
1. 微信开发者工具项目导入配置(appid、不校验合法域名)
|
||||
2. 后端启动命令(`cd apps/backend && uvicorn app.main:app --reload`)
|
||||
3. 小程序请求域名配置(开发环境指向 `http://localhost:8000`)
|
||||
4. 测试流程:登录 → 申请 → 管理端审核 → 重新登录验证
|
||||
5. Mock 模式使用说明
|
||||
|
||||
### 属性测试(Property-Based Testing)
|
||||
|
||||
使用 Python `hypothesis` 框架,测试目录:`tests/`(Monorepo 级属性测试目录)。
|
||||
|
||||
每个属性测试至少运行 100 次迭代。每个测试用注释标注对应的设计属性编号。
|
||||
|
||||
标注格式:`# Feature: miniapp-auth-system, Property N: <属性标题>`
|
||||
|
||||
**属性测试清单**:
|
||||
|
||||
| 属性 | 测试文件 | 测试方法 | 生成器 |
|
||||
|------|---------|---------|--------|
|
||||
| P2 登录创建/查找用户 | `tests/test_auth_system_properties.py` | 生成随机 openid,模拟登录,验证用户创建/查找逻辑 | `hypothesis.strategies.text` 生成 openid |
|
||||
| P4 申请创建正确性 | `tests/test_auth_system_properties.py` | 生成随机合法申请数据,验证申请记录创建 | 自定义 strategy 生成 site_code(2字母+3数字)、phone(11位数字) |
|
||||
| P5 手机号格式验证 | `tests/test_auth_system_properties.py` | 生成随机非法手机号,验证 422 拒绝 | `hypothesis.strategies.text` 过滤非 11 位数字 |
|
||||
| P6 重复申请拒绝 | `tests/test_auth_system_properties.py` | 生成随机用户+申请,提交两次,验证第二次 409 | 复用申请数据生成器 |
|
||||
| P7 人员匹配合并 | `tests/test_auth_system_properties.py` | 生成随机助教/员工数据,验证匹配结果合并 | 自定义 strategy 生成匹配数据 |
|
||||
| P8 审核操作正确性 | `tests/test_auth_system_properties.py` | 生成随机 pending 申请,执行批准/拒绝,验证状态流转 | 自定义 strategy 生成审核数据 |
|
||||
| P9 非 pending 审核拒绝 | `tests/test_auth_system_properties.py` | 生成随机非 pending 申请,验证 409 | `hypothesis.strategies.sampled_from(["approved", "rejected"])` |
|
||||
| P11 多店铺角色独立 | `tests/test_auth_system_properties.py` | 生成随机用户+多个 site_id,验证角色独立分配 | `hypothesis.strategies.lists` 生成 site_id 列表 |
|
||||
| P12 店铺切换令牌 | `tests/test_auth_system_properties.py` | 生成多店铺用户,切换店铺,验证 JWT 内容 | 复用多店铺生成器 |
|
||||
| P13 权限中间件拦截 | `tests/test_auth_system_properties.py` | 生成随机用户+权限组合,验证中间件判断 | 自定义 strategy 生成权限矩阵 |
|
||||
| P14 JWT payload 结构 | `tests/test_auth_system_properties.py` | 生成随机用户(不同 status),签发 JWT,验证 payload | `hypothesis.strategies.sampled_from(["pending", "approved"])` |
|
||||
| P15 JWT 过期/无效拒绝 | `tests/test_auth_system_properties.py` | 生成随机过期/篡改 JWT,验证 401 | 自定义 strategy 生成无效 JWT |
|
||||
|
||||
**注意**:P1(迁移幂等性)、P3(disabled 登录拒绝)、P10(状态查询完整性)作为集成测试在后端测试目录实现,因为它们需要真实数据库环境或涉及具体的数据库状态验证。
|
||||
|
||||
### 单元测试
|
||||
|
||||
单元测试位于 `apps/backend/tests/`,聚焦于:
|
||||
|
||||
- `test_xcx_auth_router.py`:微信登录路由测试(mock 微信 API)
|
||||
- `test_application_service.py`:申请服务的边界情况
|
||||
- `test_matching_service.py`:匹配逻辑的边界情况(空结果、FDW 异常)
|
||||
- `test_permission_middleware.py`:权限中间件的各种组合
|
||||
- `test_jwt_extended.py`:扩展 JWT 的 limited token 逻辑
|
||||
|
||||
### 集成测试
|
||||
|
||||
集成测试通过验证脚本实现,覆盖:
|
||||
|
||||
- 迁移脚本幂等性验证(执行两次无错误)
|
||||
- 种子数据完整性验证(权限和角色数量正确)
|
||||
- 完整认证流程:登录 → 申请 → 审核 → 权限验证
|
||||
196
.kiro/specs/03-miniapp-auth-system/requirements.md
Normal file
196
.kiro/specs/03-miniapp-auth-system/requirements.md
Normal file
@@ -0,0 +1,196 @@
|
||||
# 需求文档:小程序用户认证系统(miniapp-auth-system)
|
||||
|
||||
## 简介
|
||||
|
||||
本 SPEC 实现小程序用户认证系统,涵盖微信登录、用户申请审核、人员匹配、多店铺权限管理等完整认证链路。系统基于 P1(miniapp-db-foundation)已建立的 `auth` Schema 和 FDW 映射,在 `test_zqyy_app.auth` 中创建用户、申请、角色、绑定等业务表,并在 FastAPI 后端实现对应的 API 端点和权限中间件。
|
||||
|
||||
## 术语表
|
||||
|
||||
- **Auth_System**:小程序用户认证系统,负责微信登录、用户管理、申请审核、权限控制的完整后端服务
|
||||
- **WeChat_Auth_Service**:微信认证服务模块,负责调用微信 `code2Session` 接口换取 `openid` 和 `session_key`
|
||||
- **Application_Service**:用户申请服务模块,负责处理用户提交的入驻申请、状态流转和审核操作
|
||||
- **Matching_Service**:人员匹配服务模块,负责根据球房ID和手机号/编号在助教表和员工表中查找候选匹配
|
||||
- **Permission_Middleware**:权限中间件,基于用户的 `site_id` + `role` 拦截无权请求
|
||||
- **JWT_Service**:JWT 令牌服务,负责签发和刷新 access_token / refresh_token(已有实现,本 SPEC 扩展)
|
||||
- **site_code**:球房ID,格式为 2 字母 + 3 数字(如 `AB123`),与 `site_id` 一一映射
|
||||
- **site_id**:门店标识符,类型为 `BIGINT`,用于多门店数据隔离
|
||||
- **user_status**:用户状态枚举,取值为 `pending`(审核中)/ `approved`(已通过)/ `rejected`(已拒绝)/ `disabled`(已禁用)
|
||||
- **binding_type**:绑定类型枚举,取值为 `assistant`(助教)/ `staff`(员工)/ `manager`(管理员)
|
||||
- **FDW**:`postgres_fdw` 外部数据包装器,通过 `fdw_etl` Schema 读取 ETL 库数据
|
||||
- **Migration_Script**:存放在 `db/zqyy_app/migrations/` 中的纯 SQL 迁移脚本,以日期前缀命名
|
||||
- **BD_Manual**:数据库手册文档,存放在 `docs/database/` 中,记录表结构变更、兼容性影响、回滚策略和验证 SQL
|
||||
- **DDL_Baseline**:DDL 基线文件,存放在 `docs/database/ddl/` 中,由 `gen_consolidated_ddl.py` 自动生成
|
||||
- **Miniprogram_Auth_Pages**:小程序认证相关前端页面,包括登录页、申请表单页、审核等待页、无权限页
|
||||
- **Dev_Login**:开发模式下的 mock 登录端点,绕过微信 code2Session 调用,用于联调测试
|
||||
|
||||
## 需求
|
||||
|
||||
### 需求 1:认证数据表创建
|
||||
|
||||
**用户故事:** 作为后端开发者,我需要在 `auth` Schema 中创建用户认证相关的数据表,以便支撑完整的认证和权限管理功能。
|
||||
|
||||
#### 验收标准
|
||||
|
||||
1. WHEN Migration_Script 执行完成, THE Auth_System SHALL 在 `auth` Schema 中创建 `users` 表,包含 `id`(SERIAL PK)、`wx_openid`(UNIQUE)、`wx_union_id`、`wx_avatar_url`、`nickname`、`phone`、`status`(默认 `pending`)、`created_at`、`updated_at` 字段
|
||||
2. WHEN Migration_Script 执行完成, THE Auth_System SHALL 在 `auth` Schema 中创建 `user_applications` 表,包含 `id`(SERIAL PK)、`user_id`(FK → users)、`site_code`、`applied_role_text`、`employee_number`(可选)、`phone`、`status`(默认 `pending`)、`reviewer_id`、`review_note`、`created_at`、`reviewed_at` 字段
|
||||
3. WHEN Migration_Script 执行完成, THE Auth_System SHALL 在 `auth` Schema 中创建 `site_code_mapping` 表,包含 `id`(SERIAL PK)、`site_code`(UNIQUE,格式 2 字母 + 3 数字)、`site_id`(BIGINT UNIQUE)、`site_name`、`tenant_id`、`created_at` 字段
|
||||
4. WHEN Migration_Script 执行完成, THE Auth_System SHALL 在 `auth` Schema 中创建 `user_site_roles` 表,包含 `id`(SERIAL PK)、`user_id`(FK → users)、`site_id`(BIGINT)、`role_id`(FK → roles)、`created_at` 字段,并对 `(user_id, site_id, role_id)` 建立唯一约束
|
||||
5. WHEN Migration_Script 执行完成, THE Auth_System SHALL 在 `auth` Schema 中创建 `user_assistant_binding` 表,包含 `id`(SERIAL PK)、`user_id`(FK → users)、`site_id`(BIGINT)、`assistant_id`(BIGINT,可选)、`staff_id`(BIGINT,可选)、`binding_type`、`created_at` 字段
|
||||
6. WHEN Migration_Script 执行完成, THE Auth_System SHALL 在 `auth` Schema 中创建 `roles` 表,包含 `id`(SERIAL PK)、`code`(UNIQUE)、`name`、`description`、`created_at` 字段
|
||||
7. WHEN Migration_Script 执行完成, THE Auth_System SHALL 在 `auth` Schema 中创建 `permissions` 表,包含 `id`(SERIAL PK)、`code`(UNIQUE)、`name`、`description`、`created_at` 字段
|
||||
8. WHEN Migration_Script 执行完成, THE Auth_System SHALL 在 `auth` Schema 中创建 `role_permissions` 表,包含 `role_id`(FK → roles)、`permission_id`(FK → permissions)字段,并以 `(role_id, permission_id)` 为联合主键
|
||||
9. THE Migration_Script SHALL 使用 `IF NOT EXISTS` / `OR REPLACE` 等幂等语法,确保重复执行不会报错
|
||||
10. THE Migration_Script SHALL 在脚本中包含回滚语句(以注释形式)
|
||||
|
||||
### 需求 2:种子数据预置
|
||||
|
||||
**用户故事:** 作为系统管理员,我需要系统预置固定的权限列表和默认角色,以便审核时可直接分配。
|
||||
|
||||
#### 验收标准
|
||||
|
||||
1. WHEN 种子数据脚本执行完成, THE Auth_System SHALL 在 `auth.permissions` 表中插入 5 条固定权限记录:`view_tasks`、`view_board`、`view_board_finance`、`view_board_customer`、`view_board_coach`
|
||||
2. WHEN 种子数据脚本执行完成, THE Auth_System SHALL 在 `auth.roles` 表中插入默认角色(至少包含 `coach`(助教)、`staff`(员工)、`site_admin`(店铺管理员)、`tenant_admin`(租户管理员))
|
||||
3. WHEN 种子数据脚本执行完成, THE Auth_System SHALL 在 `auth.role_permissions` 表中为每个默认角色分配对应的权限组合
|
||||
4. THE 种子数据脚本 SHALL 使用 `ON CONFLICT DO NOTHING` 语法,确保重复执行不会产生重复数据
|
||||
|
||||
### 需求 3:微信登录
|
||||
|
||||
**用户故事:** 作为球房工作人员,我需要通过微信登录小程序,以便快速进入系统。
|
||||
|
||||
#### 验收标准
|
||||
|
||||
1. WHEN 小程序端发送微信临时登录凭证(`code`), THE WeChat_Auth_Service SHALL 调用微信 `code2Session` 接口换取 `openid` 和 `session_key`
|
||||
2. WHEN `code2Session` 返回有效 `openid` 且该 `openid` 已存在于 `auth.users` 表中, THE Auth_System SHALL 返回该用户的 JWT 令牌对(access_token + refresh_token)和用户状态信息
|
||||
3. WHEN `code2Session` 返回有效 `openid` 且该 `openid` 不存在于 `auth.users` 表中, THE Auth_System SHALL 创建新用户记录(status 为 `pending`),返回 JWT 令牌对和 `pending` 状态标识
|
||||
4. IF `code2Session` 接口调用失败或返回错误码, THEN THE WeChat_Auth_Service SHALL 返回 HTTP 401 错误,包含具体的错误描述
|
||||
5. WHEN 用户状态为 `disabled`, THE Auth_System SHALL 返回 HTTP 403 错误,拒绝登录
|
||||
|
||||
### 需求 4:用户申请提交
|
||||
|
||||
**用户故事:** 作为球房工作人员,我需要在首次登录后填写申请表单(球房ID、申请身份、手机号、编号、昵称),以便管理员审核我的身份。
|
||||
|
||||
#### 验收标准
|
||||
|
||||
1. WHEN 用户提交申请表单(包含 `site_code`、`applied_role_text`、`phone`,可选 `employee_number`), THE Application_Service SHALL 在 `auth.user_applications` 表中创建一条 status 为 `pending` 的申请记录
|
||||
2. WHEN 用户提交的 `site_code` 在 `auth.site_code_mapping` 中存在映射, THE Application_Service SHALL 将申请记录关联到对应的 `site_id`
|
||||
3. WHEN 用户提交的 `site_code` 在 `auth.site_code_mapping` 中不存在映射, THE Application_Service SHALL 仍然接受申请,申请记录中保留 `site_code` 文本,管理端显示"未找到关联信息"
|
||||
4. WHEN 用户提交申请时提供了 `nickname`, THE Auth_System SHALL 更新 `auth.users` 表中该用户的 `nickname` 字段
|
||||
5. IF 用户提交的 `phone` 为空或格式无效(非 11 位数字), THEN THE Application_Service SHALL 返回 HTTP 422 错误,包含具体的校验失败信息
|
||||
6. WHEN 用户已有一条 `pending` 状态的申请, THE Application_Service SHALL 拒绝重复提交,返回 HTTP 409 错误
|
||||
|
||||
### 需求 5:人员匹配
|
||||
|
||||
**用户故事:** 作为系统,我需要根据球房ID和手机号自动建议用户与助教/员工的对应关系,以便管理员快速审核。
|
||||
|
||||
#### 验收标准
|
||||
|
||||
1. WHEN 管理员查看某条申请详情时, THE Matching_Service SHALL 根据申请中的 `site_id` 和 `phone` 在 `fdw_etl.v_dim_assistant` 中按 `site_id` + `mobile` 匹配助教记录
|
||||
2. WHEN 管理员查看某条申请详情时, THE Matching_Service SHALL 根据申请中的 `site_id` 和 `phone` 在 `fdw_etl.v_dim_staff` 和 `fdw_etl.v_dim_staff_ex` 中按 `site_id` + `mobile` 匹配员工记录
|
||||
3. WHEN 申请中包含 `employee_number`, THE Matching_Service SHALL 额外按 `job_num` 字段匹配员工记录
|
||||
4. THE Matching_Service SHALL 将助教匹配结果和员工匹配结果合并为统一的候选列表返回,每条候选包含来源类型(`assistant` / `staff`)、姓名、手机号、编号
|
||||
5. WHEN 助教表和员工表均无匹配结果, THE Matching_Service SHALL 返回空候选列表,管理端显示"未找到关联信息"
|
||||
6. WHEN 申请的 `site_code` 无法映射到 `site_id`, THE Matching_Service SHALL 跳过匹配,返回空候选列表
|
||||
|
||||
### 需求 6:申请审核
|
||||
|
||||
**用户故事:** 作为租户管理员,我需要审核用户申请,将用户关联到对应的助教/员工,并分配身份权限。
|
||||
|
||||
#### 验收标准
|
||||
|
||||
1. WHEN 管理员批准申请并选择了候选匹配对象, THE Application_Service SHALL 将申请状态更新为 `approved`,在 `auth.user_assistant_binding` 中创建绑定记录,在 `auth.user_site_roles` 中分配角色
|
||||
2. WHEN 管理员批准申请但无候选匹配(手动审核), THE Application_Service SHALL 将申请状态更新为 `approved`,仅在 `auth.user_site_roles` 中分配角色,不创建绑定记录
|
||||
3. WHEN 管理员拒绝申请, THE Application_Service SHALL 将申请状态更新为 `rejected`,记录 `review_note`(拒绝原因)
|
||||
4. WHEN 申请审核通过后, THE Auth_System SHALL 将 `auth.users` 表中该用户的 `status` 更新为 `approved`
|
||||
5. WHEN 审核操作完成, THE Application_Service SHALL 记录 `reviewer_id` 和 `reviewed_at` 时间戳
|
||||
6. IF 审核目标申请的状态不是 `pending`, THEN THE Application_Service SHALL 返回 HTTP 409 错误,拒绝重复审核
|
||||
|
||||
### 需求 7:用户状态查询
|
||||
|
||||
**用户故事:** 作为用户,我需要看到自己的申请状态(审核中/通过/拒绝),以便了解审核进度。
|
||||
|
||||
#### 验收标准
|
||||
|
||||
1. WHEN 用户查询自身状态, THE Auth_System SHALL 返回用户的 `status`、所有申请记录列表(含每条申请的 `site_code`、`applied_role_text`、`status`、`review_note`)
|
||||
2. WHEN 用户状态为 `approved`, THE Auth_System SHALL 同时返回用户已关联的店铺列表和对应角色
|
||||
|
||||
### 需求 8:多店铺支持与店铺切换
|
||||
|
||||
**用户故事:** 作为用户,我可以同时属于多个店铺(连锁场景),切换店铺后数据正确隔离。
|
||||
|
||||
#### 验收标准
|
||||
|
||||
1. THE Auth_System SHALL 允许一个用户通过多次申请关联到多个不同的 `site_id`,每个 `site_id` 独立分配角色
|
||||
2. WHEN 用户切换当前店铺, THE JWT_Service SHALL 签发包含新 `site_id` 的 JWT 令牌对
|
||||
3. WHEN 用户携带某 `site_id` 的 JWT 访问 API, THE Permission_Middleware SHALL 仅允许访问该 `site_id` 下用户拥有权限的资源
|
||||
|
||||
### 需求 9:权限中间件
|
||||
|
||||
**用户故事:** 作为系统,我需要权限中间件正确拦截无权请求,确保数据安全。
|
||||
|
||||
#### 验收标准
|
||||
|
||||
1. WHEN 用户携带有效 JWT 访问受保护端点, THE Permission_Middleware SHALL 从 JWT 中提取 `user_id` 和 `site_id`,查询 `auth.user_site_roles` 和 `auth.role_permissions` 获取用户在该店铺下的权限列表
|
||||
2. WHEN 用户的权限列表不包含端点所需的权限 code, THE Permission_Middleware SHALL 返回 HTTP 403 错误
|
||||
3. WHEN 用户的 `status` 不是 `approved`, THE Permission_Middleware SHALL 返回 HTTP 403 错误,拒绝访问受保护端点
|
||||
4. WHEN JWT 令牌过期或无效, THE Permission_Middleware SHALL 返回 HTTP 401 错误
|
||||
|
||||
### 需求 10:JWT 令牌扩展
|
||||
|
||||
**用户故事:** 作为后端开发者,我需要扩展现有 JWT 服务以支持微信登录场景和多店铺切换。
|
||||
|
||||
#### 验收标准
|
||||
|
||||
1. THE JWT_Service SHALL 在 JWT payload 中包含 `user_id`(sub)、`site_id`、`roles`(角色 code 列表)、`type`(access/refresh)、`exp` 字段
|
||||
2. WHEN 用户通过微信登录且状态为 `approved`, THE JWT_Service SHALL 使用用户默认店铺(第一个关联的 site_id)签发令牌
|
||||
3. WHEN 用户通过微信登录且状态为 `pending`, THE JWT_Service SHALL 签发不含 `site_id` 和 `roles` 的受限令牌,仅允许访问申请提交和状态查询端点
|
||||
4. WHEN 用户请求切换店铺, THE JWT_Service SHALL 验证用户在目标 `site_id` 下有角色绑定后签发新令牌
|
||||
|
||||
### 需求 11:迁移脚本管理
|
||||
|
||||
**用户故事:** 作为后端开发者,我需要所有数据库变更都有对应的迁移脚本,以便变更可追溯、可重放。
|
||||
|
||||
#### 验收标准
|
||||
|
||||
1. THE Migration_Script SHALL 将所有认证相关表的 DDL 存放在 `db/zqyy_app/migrations/` 目录中
|
||||
2. THE Migration_Script SHALL 使用日期前缀命名(格式:`YYYY-MM-DD__<描述>.sql`)
|
||||
3. THE Migration_Script SHALL 使用 UTF-8 编码,纯 SQL(非 ORM)
|
||||
4. THE Migration_Script SHALL 在每个脚本中包含回滚语句(以注释形式)
|
||||
5. THE Migration_Script SHALL 使用幂等语法(`IF NOT EXISTS`、`ON CONFLICT DO NOTHING`),确保重复执行不会报错
|
||||
|
||||
### 需求 12:DDL 测试库落库与文档同步
|
||||
|
||||
**用户故事:** 作为后端开发者,我需要所有 DDL 变更在测试库(`test_zqyy_app`)中实际执行验证,并同步更新数据库手册和 DDL 基线,确保文档与实际 Schema 一致。
|
||||
|
||||
#### 验收标准
|
||||
|
||||
1. WHEN 迁移脚本编写完成, THE Auth_System SHALL 在 `test_zqyy_app` 测试库中执行迁移脚本,验证无错误
|
||||
2. WHEN 迁移脚本执行成功, THE Auth_System SHALL 创建或更新 `docs/database/BD_Manual_auth_tables.md` 数据库手册,包含变更说明、兼容性影响、回滚策略、验证 SQL(至少 3 条)
|
||||
3. WHEN 迁移脚本执行成功, THE Auth_System SHALL 运行 `python scripts/ops/gen_consolidated_ddl.py` 重新生成 DDL 基线文件 `docs/database/ddl/zqyy_app__auth.sql`
|
||||
4. WHEN 种子数据脚本执行成功, THE Auth_System SHALL 在数据库手册中记录种子数据内容(角色、权限、角色-权限映射)
|
||||
|
||||
### 需求 13:小程序认证前端页面
|
||||
|
||||
**用户故事:** 作为球房工作人员,我需要在小程序中看到登录页、申请表单页、审核状态页,以便完成从微信登录到正式使用的完整流程。
|
||||
|
||||
#### 验收标准
|
||||
|
||||
1. WHEN 用户首次打开小程序, THE Auth_System SHALL 展示登录页面,调用 `wx.login()` 获取 code 并发送到后端 `/api/xcx/login`
|
||||
2. WHEN 后端返回 `user_status=pending` 且用户无 pending 申请, THE Auth_System SHALL 跳转到申请表单页面,包含球房ID(`site_code`)、申请身份、手机号、编号(选填)、昵称输入框
|
||||
3. WHEN 用户提交申请表单, THE Auth_System SHALL 调用 `/api/xcx/apply` 提交申请,成功后跳转到审核等待页面
|
||||
4. WHEN 用户状态为 `pending` 且已有 pending 申请, THE Auth_System SHALL 展示审核等待页面,显示"审核中"状态和申请信息摘要
|
||||
5. WHEN 用户状态为 `rejected`, THE Auth_System SHALL 在审核等待页面显示拒绝原因,并提供"重新申请"按钮
|
||||
6. WHEN 用户状态为 `approved`, THE Auth_System SHALL 跳转到小程序主页(任务列表)
|
||||
7. WHEN 用户状态为 `disabled`, THE Auth_System SHALL 展示无权限页面,提示账号已被禁用
|
||||
8. THE Auth_System SHALL 在小程序 `app.ts` 的 `onLaunch` 中实现自动登录逻辑,根据用户状态路由到对应页面
|
||||
9. WHEN 用户拥有多个店铺, THE Auth_System SHALL 在主页提供店铺切换入口
|
||||
|
||||
### 需求 14:前后端联调验证
|
||||
|
||||
**用户故事:** 作为开发者,我需要在微信开发者工具中验证完整的认证流程(登录→申请→审核→进入主页),确保前后端接口对接正确。
|
||||
|
||||
#### 验收标准
|
||||
|
||||
1. THE Auth_System SHALL 提供联调验证脚本或文档,说明如何在微信开发者工具中测试完整认证流程
|
||||
2. THE Auth_System SHALL 在后端提供开发模式下的 mock 登录端点(`POST /api/xcx/dev-login`),接受任意 openid 直接返回 JWT,绕过微信 code2Session 调用
|
||||
3. WHEN 开发模式启用时, THE Auth_System SHALL 允许通过环境变量 `WX_DEV_MODE=true` 切换到 mock 模式
|
||||
4. THE Auth_System SHALL 在 `apps/miniprogram/doc/` 中提供联调指南文档,包含微信开发者工具配置、后端启动步骤、测试账号说明
|
||||
262
.kiro/specs/03-miniapp-auth-system/tasks.md
Normal file
262
.kiro/specs/03-miniapp-auth-system/tasks.md
Normal file
@@ -0,0 +1,262 @@
|
||||
# 实现计划:小程序用户认证系统(miniapp-auth-system)
|
||||
|
||||
## 概述
|
||||
|
||||
基于已批准的需求和设计文档,将小程序用户认证系统拆分为增量式编码任务。每个任务构建在前一个任务之上,最终完成完整的认证链路。后端使用 Python + FastAPI,数据库使用 PostgreSQL 纯 SQL,属性测试使用 hypothesis。
|
||||
|
||||
## 任务
|
||||
|
||||
- [x] 1. 创建认证数据表和种子数据
|
||||
- [x] 1.1 创建迁移脚本 `db/zqyy_app/migrations/YYYY-MM-DD__p3_create_auth_tables.sql`
|
||||
- 在 `auth` Schema 下创建 `users`、`user_applications`、`site_code_mapping`、`roles`、`permissions`、`role_permissions`、`user_site_roles`、`user_assistant_binding` 共 8 张表
|
||||
- 包含所有字段定义、约束、索引、外键
|
||||
- 使用 `IF NOT EXISTS` 幂等语法
|
||||
- 包含回滚语句(注释形式)
|
||||
- _Requirements: 1.1-1.10_
|
||||
|
||||
- [x] 1.2 创建种子数据脚本 `db/zqyy_app/migrations/YYYY-MM-DD__p3_seed_roles_permissions.sql`
|
||||
- 插入 5 条固定权限:`view_tasks`、`view_board`、`view_board_finance`、`view_board_customer`、`view_board_coach`
|
||||
- 插入默认角色:`coach`、`staff`、`site_admin`、`tenant_admin`
|
||||
- 插入角色-权限映射
|
||||
- 使用 `ON CONFLICT DO NOTHING` 幂等语法
|
||||
- _Requirements: 2.1-2.4_
|
||||
|
||||
- [x] 1.3 在测试库执行迁移脚本并验证
|
||||
- 在 `test_zqyy_app` 中执行建表脚本和种子数据脚本
|
||||
- 验证幂等性:连续执行两次无错误
|
||||
- 验证表结构、约束、索引正确
|
||||
- 验证种子数据完整(5 权限、4 角色、角色-权限映射)
|
||||
- _Requirements: 12.1_
|
||||
|
||||
- [x] 1.4 更新数据库手册和 DDL 基线
|
||||
- 创建 `docs/database/BD_Manual_auth_tables.md`,包含变更说明、兼容性影响、回滚策略、验证 SQL(至少 3 条)
|
||||
- 运行 `python scripts/ops/gen_consolidated_ddl.py` 刷新 DDL 基线
|
||||
- 在数据库手册中记录种子数据内容
|
||||
- _Requirements: 12.2, 12.3, 12.4_
|
||||
|
||||
- [x] 1.5 编写迁移脚本幂等性属性测试
|
||||
- **Property 1: 迁移脚本幂等性**
|
||||
- **Validates: Requirements 1.9, 2.4, 11.5**
|
||||
|
||||
- [x] 2. 扩展 JWT 服务和认证依赖
|
||||
- [x] 2.1 扩展 `apps/backend/app/auth/jwt.py`
|
||||
- 新增 `create_limited_token_pair(user_id)` 函数(pending 用户受限令牌)
|
||||
- 扩展 `create_access_token` 支持 `roles` 参数
|
||||
- 保持向后兼容
|
||||
- _Requirements: 10.1, 10.2, 10.3_
|
||||
|
||||
- [x] 2.2 扩展 `apps/backend/app/auth/dependencies.py`
|
||||
- 扩展 `CurrentUser` 数据类,增加 `roles`、`status`、`limited` 字段
|
||||
- 新增 `get_current_user_or_limited` 依赖(允许 pending 用户)
|
||||
- _Requirements: 10.3, 9.1_
|
||||
|
||||
- [x] 2.3 编写 JWT payload 结构属性测试
|
||||
- **Property 14: JWT payload 结构与状态一致性**
|
||||
- **Validates: Requirements 10.1, 10.2, 10.3**
|
||||
|
||||
- [x] 2.4 编写 JWT 过期/无效拒绝属性测试
|
||||
- **Property 15: JWT 过期/无效令牌拒绝**
|
||||
- **Validates: Requirements 9.4**
|
||||
|
||||
- [x] 3. 检查点 - 确保所有测试通过
|
||||
- 确保所有测试通过,如有问题请向用户确认。
|
||||
|
||||
- [x] 4. 实现微信认证服务
|
||||
- [x] 4.1 创建 `apps/backend/app/services/wechat.py`
|
||||
- 实现 `code2session(code)` 异步函数
|
||||
- 使用 `httpx.AsyncClient` 调用微信 API
|
||||
- 从环境变量读取 `WX_APPID` / `WX_SECRET`
|
||||
- 定义 `WeChatAuthError` 异常类
|
||||
- _Requirements: 3.1, 3.4_
|
||||
|
||||
- [x] 4.2 创建 Pydantic 模型 `apps/backend/app/schemas/xcx_auth.py`
|
||||
- 定义 `WxLoginRequest`、`WxLoginResponse`、`ApplicationRequest`、`ApplicationResponse`、`UserStatusResponse`、`SiteInfo`、`SwitchSiteRequest`、`MatchCandidate`、`ApproveRequest`、`RejectRequest`
|
||||
- `site_code` 使用正则校验 `^[A-Za-z]{2}\d{3}$`
|
||||
- `phone` 使用正则校验 `^\d{11}$`
|
||||
- _Requirements: 4.5_
|
||||
|
||||
- [x] 4.3 创建小程序认证路由 `apps/backend/app/routers/xcx_auth.py`
|
||||
- 实现 `POST /api/xcx/login`:微信登录(查找/创建用户 + 签发 JWT)
|
||||
- 实现 `POST /api/xcx/apply`:提交申请
|
||||
- 实现 `GET /api/xcx/me`:查询自身状态
|
||||
- 实现 `GET /api/xcx/me/sites`:查询关联店铺
|
||||
- 实现 `POST /api/xcx/switch-site`:切换店铺
|
||||
- 实现 `POST /api/xcx/refresh`:刷新令牌
|
||||
- 在 `apps/backend/app/main.py` 中注册路由
|
||||
- _Requirements: 3.2, 3.3, 3.5, 4.1-4.6, 7.1, 7.2, 8.2_
|
||||
|
||||
- [x] 4.4 编写登录创建/查找用户属性测试
|
||||
- **Property 2: 登录创建/查找用户正确性**
|
||||
- **Validates: Requirements 3.2, 3.3**
|
||||
|
||||
- [x] 4.5 编写申请创建正确性属性测试
|
||||
- **Property 4: 申请创建正确性**
|
||||
- **Validates: Requirements 4.1, 4.2, 4.3, 4.4**
|
||||
|
||||
- [x] 4.6 编写手机号格式验证属性测试
|
||||
- **Property 5: 手机号格式验证**
|
||||
- **Validates: Requirements 4.5**
|
||||
|
||||
- [x] 4.7 编写重复申请拒绝属性测试
|
||||
- **Property 6: 重复申请拒绝**
|
||||
- **Validates: Requirements 4.6**
|
||||
|
||||
- [x] 5. 实现申请服务和人员匹配
|
||||
- [x] 5.1 创建申请服务 `apps/backend/app/services/application.py`
|
||||
- 实现 `create_application()`:创建申请 + site_code 映射查找
|
||||
- 实现 `approve_application()`:批准 + 创建绑定/角色
|
||||
- 实现 `reject_application()`:拒绝 + 记录原因
|
||||
- 实现 `get_user_applications()`:查询用户申请列表
|
||||
- _Requirements: 4.1-4.4, 6.1-6.6_
|
||||
|
||||
- [x] 5.2 创建人员匹配服务 `apps/backend/app/services/matching.py`
|
||||
- 实现 `find_candidates(site_id, phone, employee_number)`
|
||||
- 通过 FDW 查询 `fdw_etl.v_dim_assistant` 和 `fdw_etl.v_dim_staff` / `v_dim_staff_ex`
|
||||
- 设置 `app.current_site_id` 进行 RLS 隔离
|
||||
- 合并助教和员工匹配结果
|
||||
- _Requirements: 5.1-5.6_
|
||||
|
||||
- [x] 5.3 创建角色权限服务 `apps/backend/app/services/role.py`
|
||||
- 实现 `get_user_permissions(user_id, site_id)`
|
||||
- 实现 `get_user_sites(user_id)`
|
||||
- 实现 `check_user_has_site_role(user_id, site_id)`
|
||||
- _Requirements: 8.1, 9.1_
|
||||
|
||||
- [x] 5.4 编写人员匹配合并属性测试
|
||||
- **Property 7: 人员匹配合并正确性**
|
||||
- **Validates: Requirements 5.1, 5.2, 5.3, 5.4**
|
||||
|
||||
- [x] 5.5 编写审核操作正确性属性测试
|
||||
- **Property 8: 审核操作正确性**
|
||||
- **Validates: Requirements 6.1, 6.2, 6.3, 6.4, 6.5**
|
||||
|
||||
- [x] 5.6 编写非 pending 审核拒绝属性测试
|
||||
- **Property 9: 非 pending 申请审核拒绝**
|
||||
- **Validates: Requirements 6.6**
|
||||
|
||||
- [ ] 6. 检查点 - 确保所有测试通过
|
||||
- 确保所有测试通过,如有问题请向用户确认。
|
||||
|
||||
- [ ] 7. 实现权限中间件和管理端路由
|
||||
- [ ] 7.1 创建权限中间件 `apps/backend/app/middleware/permission.py`
|
||||
- 实现 `require_permission(*permission_codes)` 依赖
|
||||
- 实现 `require_approved()` 依赖
|
||||
- 检查用户 status + 权限列表
|
||||
- _Requirements: 9.1-9.4_
|
||||
|
||||
- [ ] 7.2 创建管理端审核路由 `apps/backend/app/routers/admin_applications.py`
|
||||
- 实现 `GET /api/admin/applications`:查询申请列表
|
||||
- 实现 `GET /api/admin/applications/{id}`:查询申请详情 + 候选匹配
|
||||
- 实现 `POST /api/admin/applications/{id}/approve`:批准申请
|
||||
- 实现 `POST /api/admin/applications/{id}/reject`:拒绝申请
|
||||
- 在 `apps/backend/app/main.py` 中注册路由
|
||||
- _Requirements: 6.1-6.6, 5.1-5.6_
|
||||
|
||||
- [ ] 7.3 编写权限中间件拦截属性测试
|
||||
- **Property 13: 权限中间件拦截正确性**
|
||||
- **Validates: Requirements 8.3, 9.1, 9.2, 9.3**
|
||||
|
||||
- [ ] 7.4 编写多店铺角色独立分配属性测试
|
||||
- **Property 11: 多店铺角色独立分配**
|
||||
- **Validates: Requirements 8.1**
|
||||
|
||||
- [ ] 7.5 编写店铺切换令牌属性测试
|
||||
- **Property 12: 店铺切换令牌正确性**
|
||||
- **Validates: Requirements 8.2, 10.4**
|
||||
|
||||
- [ ] 8. 集成与端到端验证
|
||||
- [ ] 8.1 更新 `apps/backend/app/config.py` 新增微信配置项
|
||||
- 新增 `WX_APPID`、`WX_SECRET`、`WX_DEV_MODE` 配置读取
|
||||
- _Requirements: 3.1, 14.3_
|
||||
|
||||
- [ ] 8.2 更新 `apps/backend/app/main.py` 注册所有新路由
|
||||
- 确保 `xcx_auth` 和 `admin_applications` 路由已注册
|
||||
- 验证无路由冲突
|
||||
- _Requirements: 全部_
|
||||
|
||||
- [ ] 8.3 实现开发模式 mock 登录端点
|
||||
- 在 `routers/xcx_auth.py` 中新增 `POST /api/xcx/dev-login`
|
||||
- 仅在 `WX_DEV_MODE=true` 时注册
|
||||
- 接受 `openid` 和可选 `status` 参数,直接查找/创建用户并返回 JWT
|
||||
- _Requirements: 14.2, 14.3_
|
||||
|
||||
- [ ] 8.4 编写用户状态查询完整性属性测试
|
||||
- **Property 10: 用户状态查询完整性**
|
||||
- **Validates: Requirements 7.1, 7.2**
|
||||
|
||||
- [ ] 8.5 编写 disabled 用户登录拒绝属性测试
|
||||
- **Property 3: disabled 用户登录拒绝**
|
||||
- **Validates: Requirements 3.5**
|
||||
|
||||
- [ ] 9. 小程序认证前端页面
|
||||
- [ ] 9.1 实现请求封装工具 `apps/miniprogram/miniprogram/utils/request.ts`
|
||||
- 统一请求封装:自动附加 Authorization header
|
||||
- 401 时自动尝试 refresh_token 刷新
|
||||
- 刷新失败时跳转 login 页面
|
||||
- 后端 base URL 从配置读取(开发环境 `http://localhost:8000`)
|
||||
- _Requirements: 13.8_
|
||||
|
||||
- [ ] 9.2 实现登录页 `apps/miniprogram/miniprogram/pages/login/`
|
||||
- 调用 `wx.login()` 获取 code
|
||||
- 发送 code 到 `POST /api/xcx/login`
|
||||
- 根据返回的 `user_status` 路由到对应页面
|
||||
- 存储 token 到 globalData 和 Storage
|
||||
- 参考 H5 原型 `docs/h5_ui/pages/login.html`
|
||||
- _Requirements: 13.1, 13.6, 13.7, 13.8_
|
||||
|
||||
- [ ] 9.3 实现申请表单页 `apps/miniprogram/miniprogram/pages/apply/`
|
||||
- 表单字段:球房ID(site_code)、申请身份、手机号、编号(选填)、昵称
|
||||
- 前端校验:site_code 格式(2字母+3数字)、手机号(11位数字)
|
||||
- 提交到 `POST /api/xcx/apply`
|
||||
- 成功后跳转 reviewing 页面
|
||||
- 参考 H5 原型 `docs/h5_ui/pages/apply.html`
|
||||
- _Requirements: 13.2, 13.3_
|
||||
|
||||
- [ ] 9.4 实现审核等待页 `apps/miniprogram/miniprogram/pages/reviewing/`
|
||||
- 显示当前申请状态(审核中/已拒绝)
|
||||
- 显示申请信息摘要(球房ID、申请身份、手机号)
|
||||
- 拒绝时显示拒绝原因 + "重新申请"按钮
|
||||
- 支持下拉刷新查询最新状态
|
||||
- 参考 H5 原型 `docs/h5_ui/pages/reviewing.html`
|
||||
- _Requirements: 13.4, 13.5_
|
||||
|
||||
- [ ] 9.5 实现无权限页 `apps/miniprogram/miniprogram/pages/no-permission/`
|
||||
- 显示账号已禁用提示
|
||||
- 参考 H5 原型 `docs/h5_ui/pages/no-permission.html`
|
||||
- _Requirements: 13.7_
|
||||
|
||||
- [ ] 9.6 更新 `app.ts` 和 `app.json`
|
||||
- 在 `app.json` 中注册新页面(login、apply、reviewing、no-permission)
|
||||
- 在 `app.ts` 的 `onLaunch` 中实现自动登录逻辑
|
||||
- 根据用户状态路由到对应页面
|
||||
- 扩展 globalData 类型定义(token、userInfo、currentSiteId、sites)
|
||||
- _Requirements: 13.8_
|
||||
|
||||
- [ ] 10. 前后端联调验证
|
||||
- [ ] 10.1 编写联调指南文档 `apps/miniprogram/doc/auth-integration-guide.md`
|
||||
- 微信开发者工具项目导入配置说明
|
||||
- 后端启动步骤(含 `WX_DEV_MODE=true` 配置)
|
||||
- 测试流程:mock 登录 → 申请 → 管理端审核 → 重新登录验证
|
||||
- 常见问题排查
|
||||
- _Requirements: 14.1, 14.4_
|
||||
|
||||
- [ ] 10.2 在微信开发者工具中执行联调验证
|
||||
- 验证登录流程:wx.login → 后端 → JWT 返回
|
||||
- 验证申请流程:表单提交 → 后端创建申请 → 审核等待页展示
|
||||
- 验证状态路由:pending/approved/rejected/disabled 各状态正确跳转
|
||||
- 验证 token 刷新:access_token 过期后自动刷新
|
||||
- _Requirements: 14.1_
|
||||
|
||||
- [ ] 11. 最终检查点 - 确保所有测试通过
|
||||
- 确保所有测试通过,如有问题请向用户确认。
|
||||
|
||||
## 备注
|
||||
|
||||
- 标记 `*` 的任务为可选,可跳过以加速 MVP
|
||||
- 每个任务引用了具体的需求编号,确保可追溯
|
||||
- 检查点确保增量验证
|
||||
- 属性测试验证通用正确性属性(hypothesis,最少 100 次迭代)
|
||||
- 单元测试验证具体例子和边界情况
|
||||
- 所有数据库操作在测试库 `test_zqyy_app` 进行
|
||||
- 迁移脚本放在 `db/zqyy_app/migrations/` 目录
|
||||
- 属性测试放在 `tests/` 目录(Monorepo 级)
|
||||
1
.kiro/specs/[ETL]-fullstack-integration/.config.kiro
Normal file
1
.kiro/specs/[ETL]-fullstack-integration/.config.kiro
Normal file
@@ -0,0 +1 @@
|
||||
{"generationMode": "requirements-first"}
|
||||
252
.kiro/specs/[ETL]-fullstack-integration/design.md
Normal file
252
.kiro/specs/[ETL]-fullstack-integration/design.md
Normal file
@@ -0,0 +1,252 @@
|
||||
# 设计文档:ETL 全流程前后端联调(etl-fullstack-integration)
|
||||
|
||||
## 概述
|
||||
|
||||
本 Spec 是一个运维联调任务,不涉及新功能开发。目标是验证 `admin-web-console` Spec 产出的前后端代码在真实环境下的端到端正确性,同时收集性能数据。
|
||||
|
||||
核心流程:
|
||||
1. 启动后端 + 前端服务
|
||||
2. 通过 API 登录获取 JWT
|
||||
3. 提交全流程 ETL 任务(api_full, full_window, force-full, 全选常用任务, 自定义窗口 2025-11-01~2026-02-20, 30天切分, 全部门店)
|
||||
4. 实时监控执行过程,捕获错误/警告
|
||||
5. 执行完成后进行黑盒数据一致性测试(全链路检查器 `scripts/ops/etl_consistency_check.py` + FlowRunner 内置 `ConsistencyChecker`)
|
||||
6. 生成综合报告(含性能数据和黑盒测试结果)
|
||||
|
||||
## 架构
|
||||
|
||||
```
|
||||
联调脚本 (scripts/ops/)
|
||||
│
|
||||
├── 1. 启动服务
|
||||
│ ├── uvicorn app.main:app (后端 :8000)
|
||||
│ └── pnpm dev (前端 :5173)
|
||||
│
|
||||
├── 2. API 调用链
|
||||
│ ├── POST /api/auth/login → JWT
|
||||
│ ├── GET /api/tasks/registry → 任务列表
|
||||
│ ├── GET /api/tasks/sync-check → 同步检查
|
||||
│ ├── POST /api/tasks/validate → CLI 预览
|
||||
│ └── POST /api/execution/run → 触发执行
|
||||
│
|
||||
├── 3. 监控循环
|
||||
│ ├── GET /api/execution/queue → 状态轮询
|
||||
│ ├── GET /api/execution/{id}/logs → 日志获取
|
||||
│ └── 错误/警告检测
|
||||
│
|
||||
├── 4. 黑盒数据一致性测试
|
||||
│ ├── 全链路检查器(scripts/ops/etl_consistency_check.py)
|
||||
│ │ ├── API JSON vs ODS:字段完整性 + 值采样比对
|
||||
│ │ ├── ODS vs DWD:行数 + 字段映射 + 值比对(含 EX 表合并)
|
||||
│ │ ├── DWD vs DWS:聚合表行数 + 数值列健全性检查
|
||||
│ │ └── 白名单机制:ETL_META_COLS / SCD2_COLS / 空字符串≡None
|
||||
│ ├── FlowRunner 内置检查(quality/consistency_checker.py,自动执行)
|
||||
│ └── etl-data-consistency Hook(可选手动触发)
|
||||
│
|
||||
└── 5. 报告生成
|
||||
└── 输出到 SYSTEM_LOG_ROOT
|
||||
```
|
||||
|
||||
## 任务参数
|
||||
|
||||
根据用户需求,联调任务的具体参数:
|
||||
|
||||
```python
|
||||
INTEGRATION_TASK_CONFIG = {
|
||||
"flow": "api_full", # 全流程:API → ODS → DWD → DWS → INDEX
|
||||
"processing_mode": "full_window", # 全窗口处理
|
||||
"window_mode": "custom", # 自定义时间范围
|
||||
"window_start": "2025-11-01 00:00",
|
||||
"window_end": "2026-02-20 00:00",
|
||||
"window_split": "day", # 按天切分
|
||||
"window_split_days": 30, # 30天一个切片
|
||||
"force_full": True, # 强制全量
|
||||
"dry_run": False,
|
||||
"tasks": [ # 全选 is_common=True 的任务
|
||||
# ODS 层(22 个)
|
||||
"ODS_ASSISTANT_ACCOUNT", "ODS_ASSISTANT_LEDGER", "ODS_ASSISTANT_ABOLISH",
|
||||
"ODS_SETTLEMENT_RECORDS", "ODS_TABLE_USE", "ODS_TABLE_FEE_DISCOUNT",
|
||||
"ODS_TABLES", "ODS_PAYMENT", "ODS_REFUND", "ODS_PLATFORM_COUPON",
|
||||
"ODS_MEMBER", "ODS_MEMBER_CARD", "ODS_MEMBER_BALANCE", "ODS_RECHARGE_SETTLE",
|
||||
"ODS_GROUP_PACKAGE", "ODS_GROUP_BUY_REDEMPTION",
|
||||
"ODS_INVENTORY_STOCK", "ODS_INVENTORY_CHANGE",
|
||||
"ODS_GOODS_CATEGORY", "ODS_STORE_GOODS", "ODS_STORE_GOODS_SALES", "ODS_TENANT_GOODS",
|
||||
# DWD 层(1 个常用)
|
||||
"DWD_LOAD_FROM_ODS",
|
||||
# DWS 层(15 个常用,排除 DWS_MAINTENANCE)
|
||||
"DWS_BUILD_ORDER_SUMMARY", "DWS_ASSISTANT_DAILY", "DWS_ASSISTANT_MONTHLY",
|
||||
"DWS_ASSISTANT_CUSTOMER", "DWS_ASSISTANT_SALARY", "DWS_ASSISTANT_FINANCE",
|
||||
"DWS_MEMBER_CONSUMPTION", "DWS_MEMBER_VISIT",
|
||||
"DWS_FINANCE_DAILY", "DWS_FINANCE_RECHARGE", "DWS_FINANCE_INCOME_STRUCTURE",
|
||||
"DWS_FINANCE_DISCOUNT_DETAIL",
|
||||
"DWS_GOODS_STOCK_DAILY", "DWS_GOODS_STOCK_WEEKLY", "DWS_GOODS_STOCK_MONTHLY",
|
||||
# INDEX 层(3 个常用,排除 DWS_ML_MANUAL_IMPORT)
|
||||
"DWS_WINBACK_INDEX", "DWS_NEWCONV_INDEX", "DWS_RELATION_INDEX",
|
||||
],
|
||||
# store_id 由后端从 JWT 注入(默认管理员 site_id=1)
|
||||
# 注意:用户要求"全部门店",但当前系统只有 site_id=1,后续多门店需逐个执行
|
||||
}
|
||||
```
|
||||
|
||||
## 监控策略
|
||||
|
||||
- 轮询间隔:30 秒
|
||||
- 最长等待:30 分钟(无新日志输出时)
|
||||
- 错误检测:日志行匹配 `ERROR`、`CRITICAL`、`Traceback`、`Exception`
|
||||
- 警告检测:日志行匹配 `WARNING`、`WARN`
|
||||
- 计时解析:从日志中提取时间戳,计算阶段耗时
|
||||
|
||||
## 黑盒数据一致性测试
|
||||
|
||||
### 两套检查工具的定位
|
||||
|
||||
| 工具 | 路径 | 触发方式 | 覆盖范围 | 适用场景 |
|
||||
|------|------|---------|---------|---------|
|
||||
| 全链路检查器 | `scripts/ops/etl_consistency_check.py` | 手动运行 / `etl-data-consistency` Hook | API→ODS→DWD→DWS 四层全链路 | 联调后独立全面验证(本 Spec 主要使用) |
|
||||
| FlowRunner 内置检查 | `apps/etl/connectors/feiqiu/quality/consistency_checker.py` | FlowRunner 自动调用 `_run_post_consistency_check()` | API→ODS 字段 + ODS→DWD 映射/值 | ETL 执行后自动轻量检查 |
|
||||
|
||||
联调场景下,FlowRunner 在 ETL 执行完成后已自动运行内置检查并输出报告到 `ETL_REPORT_ROOT`。联调脚本额外运行全链路检查器,覆盖 DWD→DWS 聚合验证和更深入的值采样比对。
|
||||
|
||||
### etl-data-consistency Hook
|
||||
|
||||
`.kiro/hooks/etl-data-consistency.kiro.hook` 提供手动触发入口,执行 `scripts/ops/etl_consistency_check.py`。联调任务 5 也可通过此 Hook 替代手动运行脚本。
|
||||
|
||||
### 白名单机制
|
||||
|
||||
全链路检查器在值比对时使用三层白名单过滤,避免 ETL 框架自动填充的列和已知等价差异产生误报:
|
||||
|
||||
#### 1. ETL 元数据列白名单(`ETL_META_COLS`)
|
||||
|
||||
不参与 API↔ODS 值比对的列:
|
||||
|
||||
```python
|
||||
ETL_META_COLS = {"source_file", "source_endpoint", "fetched_at", "payload", "content_hash"}
|
||||
```
|
||||
|
||||
这些列由 ETL 框架在 ODS 落库时自动填充,API 源数据中不存在。
|
||||
|
||||
#### 2. SCD2 管理列白名单(`SCD2_COLS`)
|
||||
|
||||
不参与 ODS↔DWD 值比对的列:
|
||||
|
||||
```python
|
||||
SCD2_COLS = {
|
||||
"valid_from", "valid_to", "is_current", "etl_loaded_at", "etl_batch_id",
|
||||
"scd2_start_time", "scd2_end_time", "scd2_is_current", "scd2_version",
|
||||
}
|
||||
```
|
||||
|
||||
这些列由 DWD 层 SCD2 逻辑自动维护,ODS 源数据中不存在。
|
||||
|
||||
#### 3. 空字符串 vs None 等价规则(`_values_differ()`)
|
||||
|
||||
API 返回空字符串 `""` 而数据库存储为 `None` 时,视为等价(不算差异),但标记为 `whitelist`:
|
||||
|
||||
```python
|
||||
# API 空字符串 "" vs DB None → 白名单(等价但标记)
|
||||
if api_val is not None and ods_val is None:
|
||||
if isinstance(api_val, str) and api_val.strip() == "":
|
||||
return False, "whitelist"
|
||||
```
|
||||
|
||||
报告中白名单差异以折叠 `<details>` 块展示,不计入失败统计。
|
||||
|
||||
#### 4. FlowRunner 内置检查的白名单
|
||||
|
||||
`consistency_checker.py` 使用类似但略有不同的白名单:
|
||||
- `ODS_META_COLUMNS`:与 `ETL_META_COLS` 相同,额外包含 `record_index`
|
||||
- `KNOWN_NO_SOURCE`:按表配置的已知无源字段(如 `dwd.dim_member.update_time`),标记为已知无源而非报错
|
||||
|
||||
### 调用方式
|
||||
|
||||
联调脚本在 ETL 全流程执行完成后,运行全链路检查器:
|
||||
|
||||
```bash
|
||||
cd C:\NeoZQYY
|
||||
uv run python scripts/ops/etl_consistency_check.py
|
||||
```
|
||||
|
||||
脚本自动完成:
|
||||
1. 从 `LOG_ROOT` 找到最近一次 ETL 日志,解析执行的任务列表
|
||||
2. 从 `FETCH_ROOT` 读取 API JSON 落盘文件
|
||||
3. 连接数据库(`PG_DSN`),逐表逐字段比对:
|
||||
- API JSON vs ODS:字段完整性 + 值采样比对(随机 5 条)
|
||||
- ODS vs DWD:行数 + 字段映射 + 值比对(含 EX 表合并)
|
||||
- DWD vs DWS:聚合表行数 + 数值列健全性(NULL 率、负值、min/max)
|
||||
4. 输出 Markdown 报告到 `ETL_REPORT_ROOT`
|
||||
|
||||
### 检查内容
|
||||
|
||||
| 检查类型 | 对比对象 | 检查项 | 白名单处理 |
|
||||
|---------|---------|--------|-----------|
|
||||
| API vs ODS | API JSON 缓存 vs ODS 表 | 字段完整性 + 值采样比对(5 条记录) | `ETL_META_COLS` 排除;空字符串≡None |
|
||||
| ODS vs DWD | ODS 表 vs DWD 表(含 EX 表) | 行数对比 + 字段映射 + 值比对 | `SCD2_COLS` 排除;空字符串≡None |
|
||||
| DWD vs DWS | DWD 源表 vs DWS 聚合表 | 行数非空 + 数值列健全性(NULL 率、负值、min/max) | 无(DWS 为聚合结果,不做逐行值比对) |
|
||||
|
||||
### 报告格式
|
||||
|
||||
全链路检查器输出 Markdown 报告,包含:
|
||||
1. ETL 执行概览(任务列表、成功/失败/跳过统计)
|
||||
2. API↔ODS 数据一致性(逐表逐字段值比对,白名单差异折叠展示)
|
||||
3. ODS↔DWD 数据一致性(行数对比 + 映射验证 + 值采样,含字段级统计)
|
||||
4. DWD↔DWS 数据一致性(DWS 表概览 + 数值列健全性检查)
|
||||
5. 异常汇总与建议
|
||||
|
||||
### 参考数据
|
||||
|
||||
`dataflow-field-completion` 的实际执行结果:API vs ODS 22/22 通过,ODS vs DWD 38/42 通过。本次联调执行 api_full 全流程后,预期结果应与此一致或更优(因为联调包含最新的字段补全)。
|
||||
|
||||
## 报告格式
|
||||
|
||||
报告输出为 Markdown 文件,路径:`{SYSTEM_LOG_ROOT}/{date}__etl_integration_report.md`
|
||||
|
||||
```markdown
|
||||
# ETL 全流程联调报告
|
||||
|
||||
## 执行概要
|
||||
- 任务参数:...
|
||||
- 开始时间 / 结束时间 / 总时长
|
||||
- 退出码 / 最终状态
|
||||
|
||||
## 性能报告
|
||||
- 各窗口切片耗时对比表
|
||||
- Top-5 耗时阶段
|
||||
- 总体吞吐量估算
|
||||
|
||||
## 黑盒测试报告
|
||||
- API vs ODS:X/Y 张表通过(白名单差异 N 处)
|
||||
- ODS vs DWD:X/Y 张表通过(白名单差异 N 处)
|
||||
- DWD vs DWS:X 张表有数据 / Y 张总计,异常指标 N 个
|
||||
- 失败表清单及差异明细
|
||||
|
||||
## DEBUG 报告(如有)
|
||||
- 错误摘要
|
||||
- 警告摘要
|
||||
- 相关日志片段
|
||||
- 可能的原因分析
|
||||
```
|
||||
|
||||
## 正确性属性
|
||||
|
||||
本 Spec 为运维联调任务,不涉及新功能代码开发,因此不定义形式化的属性测试。验证通过以下方式进行:
|
||||
- 服务健康检查通过
|
||||
- 任务提交成功并开始执行
|
||||
- 执行完成后退出码和日志符合预期
|
||||
- 黑盒数据一致性测试通过(全链路检查器覆盖 API→ODS→DWD→DWS 四层,白名单差异不计入失败)
|
||||
- 报告文件成功生成(含性能报告和黑盒测试报告)
|
||||
|
||||
## 测试策略
|
||||
|
||||
本 Spec 本身就是一次集成测试。不额外编写单元测试或属性测试。验证标准:
|
||||
- 后端 API 响应正确
|
||||
- ETL CLI 子进程正常启动和执行
|
||||
- 日志正确捕获和推送
|
||||
- 黑盒数据一致性测试通过(全链路检查器 API→ODS→DWD→DWS + FlowRunner 内置检查)
|
||||
- 报告文件正确生成到 ETL_REPORT_ROOT(全链路检查报告)和 SYSTEM_LOG_ROOT(联调综合报告)
|
||||
|
||||
### 黑盒测试验证标准
|
||||
|
||||
- API vs ODS:所有已采集的 ODS 表字段完整性和值采样检查通过(白名单差异不计入失败)
|
||||
- ODS vs DWD:所有已配置映射的表行数和值比对检查通过(SCD2 列排除,白名单差异不计入失败)
|
||||
- DWD vs DWS:所有 DWS 聚合表行数非空,数值列无异常(高 NULL 率、金额负值等)
|
||||
- 全链路检查报告 `consistency_check_<timestamp>.md` 成功生成到 ETL_REPORT_ROOT
|
||||
- 综合联调报告中包含黑盒测试结果摘要(含白名单差异统计)
|
||||
@@ -58,13 +58,28 @@
|
||||
3. THE 报告 SHALL 标注耗时最长的 Top-5 阶段/任务
|
||||
4. THE 报告 SHALL 包含每个窗口切片(30天)的独立耗时对比
|
||||
|
||||
### 需求 5:联调报告输出
|
||||
### 需求 5:黑盒数据一致性测试
|
||||
|
||||
**用户故事:** 作为开发者,我希望联调完成后获得一份综合报告,包含执行情况、性能数据和可能的 DEBUG 信息。
|
||||
**用户故事:** 作为开发者,我希望在 ETL 全流程执行完成后,以黑盒视角对比数据上下游的字段差异和值差异,验证数据从 API 到 ODS 再到 DWD 再到 DWS 的完整性和正确性。
|
||||
|
||||
#### 验收标准
|
||||
|
||||
1. WHEN ETL 全流程执行完成后, THE 联调脚本 SHALL 运行全链路检查器 `scripts/ops/etl_consistency_check.py`,执行 API→ODS→DWD→DWS 四层数据一致性检查
|
||||
2. WHEN 执行 API vs ODS 检查时, THE 检查器 SHALL 对比 API JSON 落盘数据与 ODS 落库数据的字段完整性和值采样(随机 5 条记录的关键字段),覆盖所有已采集的 ODS 表
|
||||
3. WHEN 执行 ODS vs DWD 检查时, THE 检查器 SHALL 对比 ODS 数据与 DWD 落库数据的行数、字段映射正确性和值一致性(含 EX 表合并比对)
|
||||
4. WHEN 执行 DWD vs DWS 检查时, THE 检查器 SHALL 验证 DWS 聚合表的行数非空、数值列健全性(NULL 率、负值、min/max),标注异常指标
|
||||
5. WHEN 值比对遇到白名单场景时, THE 检查器 SHALL 将 ETL 元数据列(`source_file`、`source_endpoint`、`fetched_at`、`payload`、`content_hash`)和 SCD2 管理列排除在值比对之外,并将 API 空字符串 `""` vs DB `None` 视为等价(标记为 whitelist)
|
||||
6. WHEN 黑盒测试完成后, THE 检查器 SHALL 输出 Markdown 报告到 `ETL_REPORT_ROOT` 环境变量指定的目录
|
||||
7. WHEN 黑盒测试报告生成后, THE 报告 SHALL 包含每张表的检查结果、差异明细(含白名单差异折叠展示)、通过/失败状态、字段级统计、以及汇总统计
|
||||
|
||||
### 需求 6:联调报告输出
|
||||
|
||||
**用户故事:** 作为开发者,我希望联调完成后获得一份综合报告,包含执行情况、性能数据、黑盒测试结果和可能的 DEBUG 信息。
|
||||
|
||||
#### 验收标准
|
||||
|
||||
1. THE 报告 SHALL 包含:执行概要(任务参数、开始/结束时间、总时长、退出码)
|
||||
2. THE 报告 SHALL 包含:性能报告(各阶段耗时、窗口切片耗时对比、Top-5 瓶颈)
|
||||
3. IF 执行过程中出现错误或警告, THEN THE 报告 SHALL 包含 DEBUG 报告(错误摘要、相关日志片段、可能的原因分析)
|
||||
4. THE 报告 SHALL 输出到 `SYSTEM_LOG_ROOT` 环境变量指定的目录
|
||||
3. THE 报告 SHALL 包含:黑盒测试结果摘要(API vs ODS 通过数/总数、ODS vs DWD 通过数/总数、DWD vs DWS 表概览、失败表清单、白名单差异数量)
|
||||
4. IF 执行过程中出现错误或警告, THEN THE 报告 SHALL 包含 DEBUG 报告(错误摘要、相关日志片段、可能的原因分析)
|
||||
5. THE 报告 SHALL 输出到 `SYSTEM_LOG_ROOT` 环境变量指定的目录
|
||||
131
.kiro/specs/[ETL]-fullstack-integration/tasks.md
Normal file
131
.kiro/specs/[ETL]-fullstack-integration/tasks.md
Normal file
@@ -0,0 +1,131 @@
|
||||
# 实现计划:ETL 全流程前后端联调(etl-fullstack-integration)
|
||||
|
||||
## 概述
|
||||
|
||||
基于 `admin-web-console` 已完成的前后端代码,进行端到端联调验证。全程使用 Playwright 浏览器模拟真实用户操作(登录、配置、提交、监控),不直接调用 API。通过管理后台 UI 提交 api_full 全流程 ETL 任务(自定义窗口 2025-11-01~2026-02-20,30天切分,force-full,全选常用任务),实时监控执行过程,收集性能数据,执行黑盒数据一致性测试(全链路检查器 `scripts/ops/etl_consistency_check.py` + FlowRunner 内置 `ConsistencyChecker`),最终生成综合报告。
|
||||
|
||||
## 任务
|
||||
|
||||
- [x] 1. 服务启动与健康检查
|
||||
- [x] 1.1 启动后端服务(`apps/backend/`,uvicorn :8000),确认 API 可达
|
||||
- 使用 `controlPwshProcess` 启动 `uvicorn app.main:app --host 0.0.0.0 --port 8000`,cwd 为 `apps/backend/`
|
||||
- 等待服务就绪,验证 `http://localhost:8000/docs` 可访问
|
||||
- _Requirements: 1.1_
|
||||
|
||||
- [x] 1.2 启动前端服务(`apps/admin-web/`,pnpm dev :5173),确认页面可访问
|
||||
- 使用 `controlPwshProcess` 启动 `pnpm dev`,cwd 为 `apps/admin-web/`
|
||||
- 等待 Vite 就绪,验证 `http://localhost:5173` 可访问
|
||||
- _Requirements: 1.2_
|
||||
|
||||
- [x] 1.3 浏览器登录与健康检查
|
||||
- 使用 Playwright 打开 `http://localhost:5173`,应自动跳转到 `/login`
|
||||
- 在登录表单中输入用户名 `admin`、密码 `admin123`,点击登录按钮
|
||||
- 验证登录成功后跳转到任务配置页面(`/`)
|
||||
- 确认侧边栏导航菜单正常渲染(任务配置、任务管理、ETL 状态、数据库、日志、环境配置、运维面板)
|
||||
- _Requirements: 1.3, 1.4, 1.5_
|
||||
|
||||
- [x] 2. 浏览器操作:任务配置与提交
|
||||
- [x] 2.1 在任务配置页面填写全流程参数
|
||||
- 在任务配置页面(`/`),选择 Flow 为 `api_full`(API → ODS → DWD → DWS → INDEX)
|
||||
- 选择处理模式为 `full_window`(全窗口)
|
||||
- 设置时间窗口模式为"自定义",填入开始时间 `2025-11-01`、结束时间 `2026-02-20`
|
||||
- 设置窗口切分为"按天",切分天数为 `30`
|
||||
- 勾选 `force_full`(强制全量)
|
||||
- 在任务选择区域,全选 `is_common=True` 的常用任务(共 41 个)
|
||||
- 确认 CLI 命令预览区域显示完整参数(--flow api_full --processing-mode full_window --window-start ... --window-end ... --window-split day --window-split-days 30 --force-full --tasks ...)
|
||||
- _Requirements: 2.1_
|
||||
|
||||
- [x] 2.2 通过浏览器提交任务执行
|
||||
- 点击"直接执行"按钮(SendOutlined 图标),触发 `POST /api/execution/run`
|
||||
- 确认页面显示任务提交成功的提示消息
|
||||
- 记录返回的 execution_id(从页面响应或跳转中获取)
|
||||
- _Requirements: 2.2, 2.4_
|
||||
|
||||
- [ ] 3. 浏览器操作:执行监控与 DEBUG
|
||||
- [x] 3.1 在任务管理页面监控执行状态
|
||||
- 导航到"任务管理"页面(`/task-manager`),点击侧边栏"任务管理"菜单
|
||||
- 在"队列"Tab 中确认刚提交的任务状态为 `running`
|
||||
- 点击 running 状态的任务行,打开 WebSocket 实时日志流抽屉
|
||||
- 持续观察日志输出,每 30 秒检查一次页面状态
|
||||
- 检测日志中的 ERROR / CRITICAL / Traceback / Exception / WARNING 关键字
|
||||
- 如果连续 30 分钟无新日志输出,报告超时警告
|
||||
- 任务完成(success/failed/cancelled)时停止监控
|
||||
- _Requirements: 3.1, 3.2, 3.3, 3.4, 3.5_
|
||||
|
||||
- [ ] 3.2 对执行过程中发现的错误/警告进行 DEBUG 分析
|
||||
- 从日志流中收集所有 ERROR 和 WARNING 日志行及其上下文
|
||||
- 分析错误类型:API 超时、数据库连接、数据质量、配置问题等
|
||||
- 如果任务失败,切换到"历史"Tab 查看完整执行详情和日志
|
||||
- 记录 DEBUG 发现到报告中
|
||||
- _Requirements: 3.2, 3.5_
|
||||
|
||||
- [ ] 4. 性能计时与报告生成
|
||||
- [ ] 4.1 从浏览器获取执行日志,提取精细计时数据
|
||||
- 在"任务管理"→"历史"Tab 中,点击已完成的任务查看执行详情
|
||||
- 通过 `GET /api/execution/{id}/logs` 获取完整日志(可通过浏览器或 API 辅助)
|
||||
- 从日志中提取每个窗口切片(30天)的开始/结束时间
|
||||
- 计算每个切片的耗时
|
||||
- 识别 ODS / DWD / DWS / INDEX 各阶段的耗时
|
||||
- 标注 Top-5 耗时最长的阶段/任务
|
||||
- _Requirements: 4.1, 4.2, 4.3, 4.4_
|
||||
|
||||
- [ ] 4.2 生成综合联调报告,输出到 SYSTEM_LOG_ROOT
|
||||
- 报告包含:执行概要(参数、时间、退出码)
|
||||
- 报告包含:性能报告(各切片耗时对比、Top-5 瓶颈)
|
||||
- 报告包含:DEBUG 报告(如有错误/警告)
|
||||
- 黑盒测试结果摘要将在任务 5.3 中追加
|
||||
- 输出路径:`{SYSTEM_LOG_ROOT}/{date}__etl_integration_report.md`
|
||||
- 路径通过 `SYSTEM_LOG_ROOT` 环境变量获取,缺失时报错
|
||||
- _Requirements: 6.1, 6.2, 6.4, 6.5_
|
||||
|
||||
- [ ] 5. 黑盒数据一致性测试
|
||||
- [ ] 5.1 运行全链路检查器,执行 API→ODS→DWD→DWS 四层数据一致性检查
|
||||
- 运行 `uv run python scripts/ops/etl_consistency_check.py`(cwd 为项目根目录 `C:\NeoZQYY`)
|
||||
- 脚本自动从 `LOG_ROOT` 找到最近一次 ETL 日志,解析本次执行的任务列表
|
||||
- 脚本自动从 `FETCH_ROOT` 读取 API JSON 落盘文件
|
||||
- 脚本连接数据库(`PG_DSN`),逐表逐字段比对:
|
||||
- API JSON vs ODS:字段完整性 + 值采样比对(随机 5 条记录),`ETL_META_COLS` 白名单列排除
|
||||
- ODS vs DWD:行数对比 + 字段映射 + 值比对(含 EX 表合并),`SCD2_COLS` 白名单列排除
|
||||
- DWD vs DWS:聚合表行数非空检查 + 数值列健全性(NULL 率、负值、min/max)
|
||||
- 白名单处理:API 空字符串 `""` vs DB `None` 视为等价,标记为 whitelist,不计入失败
|
||||
- 报告自动输出到 `ETL_REPORT_ROOT`(环境变量,缺失时报错)
|
||||
- 备选触发方式:可通过 `etl-data-consistency` Hook 手动触发(效果等同)
|
||||
- _Requirements: 5.1, 5.2, 5.3, 5.4, 5.5_
|
||||
|
||||
- [ ] 5.2 检查 FlowRunner 内置一致性报告
|
||||
- FlowRunner 在 ETL 执行完成后已自动调用 `_run_post_consistency_check()` 生成报告到 `ETL_REPORT_ROOT`
|
||||
- 确认内置报告已生成,检查 API vs ODS 和 ODS vs DWD 的通过/失败统计
|
||||
- 内置检查使用 `ODS_META_COLUMNS` 白名单(含 `record_index`)和 `KNOWN_NO_SOURCE` 按表白名单
|
||||
- 对比两份报告的结论是否一致(全链路检查器 vs FlowRunner 内置检查)
|
||||
- _Requirements: 5.1, 5.2, 5.3_
|
||||
|
||||
- [ ] 5.3 将黑盒测试结果摘要写入综合联调报告
|
||||
- 在任务 4.2 生成的联调报告中追加"黑盒测试报告"章节
|
||||
- 包含:API vs ODS 通过数/总数、ODS vs DWD 通过数/总数、DWD vs DWS 表概览
|
||||
- 包含:白名单差异数量统计、失败表清单
|
||||
- 引用全链路检查报告的完整路径
|
||||
- _Requirements: 6.3_
|
||||
|
||||
- [ ] 6. 服务清理
|
||||
- [ ] 6.1 关闭浏览器,停止后端和前端服务,清理资源
|
||||
- 关闭 Playwright 浏览器实例
|
||||
- 停止 uvicorn 后端进程(`controlPwshProcess` stop)
|
||||
- 停止 pnpm dev 前端进程(`controlPwshProcess` stop)
|
||||
- 报告联调完成状态
|
||||
|
||||
## 说明
|
||||
|
||||
- 本 Spec 为运维联调任务,不涉及新功能代码开发
|
||||
- 不编写属性测试或单元测试,联调本身即为集成验证
|
||||
- **全程使用 Playwright 浏览器模拟真实用户操作**:登录、页面导航、表单填写、按钮点击、日志查看等均通过浏览器完成
|
||||
- **黑盒测试使用两套工具**:
|
||||
- 全链路检查器 `scripts/ops/etl_consistency_check.py`:覆盖 API→ODS→DWD→DWS 四层,联调主要使用
|
||||
- FlowRunner 内置 `ConsistencyChecker`(`quality/consistency_checker.py`):ETL 执行后自动运行,覆盖 API→ODS + ODS→DWD
|
||||
- **白名单机制**:`ETL_META_COLS`(ODS 元数据列)、`SCD2_COLS`(SCD2 管理列)排除在值比对之外;API 空字符串 `""` vs DB `None` 视为等价
|
||||
- **`etl-data-consistency` Hook**(`.kiro/hooks/etl-data-consistency.kiro.hook`)可作为手动触发全链路检查的替代方式
|
||||
- 黑盒测试在 ETL 全流程执行完成后、服务清理前执行,确保数据库中有最新数据可供对比
|
||||
- 全选常用任务 = 任务注册表中 `is_common=True` 的所有任务(共 41 个)
|
||||
- "全部门店":当前系统仅有 site_id=1(默认管理员绑定),如需多门店需逐个执行
|
||||
- 监控允许空闲等待,最长 30 分钟无新日志才报超时
|
||||
- 报告输出路径遵循 export-paths 规范:全链路检查报告输出到 `ETL_REPORT_ROOT`,联调综合报告输出到 `SYSTEM_LOG_ROOT`
|
||||
- 全链路检查器需要 `PG_DSN`、`FETCH_ROOT`、`LOG_ROOT`、`ETL_REPORT_ROOT` 环境变量,缺失时报错
|
||||
@@ -1,126 +0,0 @@
|
||||
# 设计文档:ETL 全流程前后端联调(etl-fullstack-integration)
|
||||
|
||||
## 概述
|
||||
|
||||
本 Spec 是一个运维联调任务,不涉及新功能开发。目标是验证 `admin-web-console` Spec 产出的前后端代码在真实环境下的端到端正确性,同时收集性能数据。
|
||||
|
||||
核心流程:
|
||||
1. 启动后端 + 前端服务
|
||||
2. 通过 API 登录获取 JWT
|
||||
3. 提交全流程 ETL 任务(api_full, full_window, force-full, 全选常用任务, 自定义窗口 2025-11-01~2026-02-20, 30天切分, 全部门店)
|
||||
4. 实时监控执行过程,捕获错误/警告
|
||||
5. 执行完成后生成综合报告
|
||||
|
||||
## 架构
|
||||
|
||||
```
|
||||
联调脚本 (scripts/ops/)
|
||||
│
|
||||
├── 1. 启动服务
|
||||
│ ├── uvicorn app.main:app (后端 :8000)
|
||||
│ └── pnpm dev (前端 :5173)
|
||||
│
|
||||
├── 2. API 调用链
|
||||
│ ├── POST /api/auth/login → JWT
|
||||
│ ├── GET /api/tasks/registry → 任务列表
|
||||
│ ├── GET /api/tasks/sync-check → 同步检查
|
||||
│ ├── POST /api/tasks/validate → CLI 预览
|
||||
│ └── POST /api/execution/run → 触发执行
|
||||
│
|
||||
├── 3. 监控循环
|
||||
│ ├── GET /api/execution/queue → 状态轮询
|
||||
│ ├── GET /api/execution/{id}/logs → 日志获取
|
||||
│ └── 错误/警告检测
|
||||
│
|
||||
└── 4. 报告生成
|
||||
└── 输出到 SYSTEM_LOG_ROOT
|
||||
```
|
||||
|
||||
## 任务参数
|
||||
|
||||
根据用户需求,联调任务的具体参数:
|
||||
|
||||
```python
|
||||
INTEGRATION_TASK_CONFIG = {
|
||||
"flow": "api_full", # 全流程:API → ODS → DWD → DWS → INDEX
|
||||
"processing_mode": "full_window", # 全窗口处理
|
||||
"window_mode": "custom", # 自定义时间范围
|
||||
"window_start": "2025-11-01 00:00",
|
||||
"window_end": "2026-02-20 00:00",
|
||||
"window_split": "day", # 按天切分
|
||||
"window_split_days": 30, # 30天一个切片
|
||||
"force_full": True, # 强制全量
|
||||
"dry_run": False,
|
||||
"tasks": [ # 全选 is_common=True 的任务
|
||||
# ODS 层(22 个)
|
||||
"ODS_ASSISTANT_ACCOUNT", "ODS_ASSISTANT_LEDGER", "ODS_ASSISTANT_ABOLISH",
|
||||
"ODS_SETTLEMENT_RECORDS", "ODS_TABLE_USE", "ODS_TABLE_FEE_DISCOUNT",
|
||||
"ODS_TABLES", "ODS_PAYMENT", "ODS_REFUND", "ODS_PLATFORM_COUPON",
|
||||
"ODS_MEMBER", "ODS_MEMBER_CARD", "ODS_MEMBER_BALANCE", "ODS_RECHARGE_SETTLE",
|
||||
"ODS_GROUP_PACKAGE", "ODS_GROUP_BUY_REDEMPTION",
|
||||
"ODS_INVENTORY_STOCK", "ODS_INVENTORY_CHANGE",
|
||||
"ODS_GOODS_CATEGORY", "ODS_STORE_GOODS", "ODS_STORE_GOODS_SALES", "ODS_TENANT_GOODS",
|
||||
# DWD 层(1 个常用)
|
||||
"DWD_LOAD_FROM_ODS",
|
||||
# DWS 层(15 个常用,排除 DWS_MAINTENANCE)
|
||||
"DWS_BUILD_ORDER_SUMMARY", "DWS_ASSISTANT_DAILY", "DWS_ASSISTANT_MONTHLY",
|
||||
"DWS_ASSISTANT_CUSTOMER", "DWS_ASSISTANT_SALARY", "DWS_ASSISTANT_FINANCE",
|
||||
"DWS_MEMBER_CONSUMPTION", "DWS_MEMBER_VISIT",
|
||||
"DWS_FINANCE_DAILY", "DWS_FINANCE_RECHARGE", "DWS_FINANCE_INCOME_STRUCTURE",
|
||||
"DWS_FINANCE_DISCOUNT_DETAIL",
|
||||
"DWS_GOODS_STOCK_DAILY", "DWS_GOODS_STOCK_WEEKLY", "DWS_GOODS_STOCK_MONTHLY",
|
||||
# INDEX 层(3 个常用,排除 DWS_ML_MANUAL_IMPORT)
|
||||
"DWS_WINBACK_INDEX", "DWS_NEWCONV_INDEX", "DWS_RELATION_INDEX",
|
||||
],
|
||||
# store_id 由后端从 JWT 注入(默认管理员 site_id=1)
|
||||
# 注意:用户要求"全部门店",但当前系统只有 site_id=1,后续多门店需逐个执行
|
||||
}
|
||||
```
|
||||
|
||||
## 监控策略
|
||||
|
||||
- 轮询间隔:30 秒
|
||||
- 最长等待:30 分钟(无新日志输出时)
|
||||
- 错误检测:日志行匹配 `ERROR`、`CRITICAL`、`Traceback`、`Exception`
|
||||
- 警告检测:日志行匹配 `WARNING`、`WARN`
|
||||
- 计时解析:从日志中提取时间戳,计算阶段耗时
|
||||
|
||||
## 报告格式
|
||||
|
||||
报告输出为 Markdown 文件,路径:`{SYSTEM_LOG_ROOT}/{date}__etl_integration_report.md`
|
||||
|
||||
```markdown
|
||||
# ETL 全流程联调报告
|
||||
|
||||
## 执行概要
|
||||
- 任务参数:...
|
||||
- 开始时间 / 结束时间 / 总时长
|
||||
- 退出码 / 最终状态
|
||||
|
||||
## 性能报告
|
||||
- 各窗口切片耗时对比表
|
||||
- Top-5 耗时阶段
|
||||
- 总体吞吐量估算
|
||||
|
||||
## DEBUG 报告(如有)
|
||||
- 错误摘要
|
||||
- 警告摘要
|
||||
- 相关日志片段
|
||||
- 可能的原因分析
|
||||
```
|
||||
|
||||
## 正确性属性
|
||||
|
||||
本 Spec 为运维联调任务,不涉及新功能代码开发,因此不定义形式化的属性测试。验证通过以下方式进行:
|
||||
- 服务健康检查通过
|
||||
- 任务提交成功并开始执行
|
||||
- 执行完成后退出码和日志符合预期
|
||||
- 报告文件成功生成
|
||||
|
||||
## 测试策略
|
||||
|
||||
本 Spec 本身就是一次集成测试。不额外编写单元测试或属性测试。验证标准:
|
||||
- 后端 API 响应正确
|
||||
- ETL CLI 子进程正常启动和执行
|
||||
- 日志正确捕获和推送
|
||||
- 报告文件正确生成到 SYSTEM_LOG_ROOT
|
||||
@@ -1,86 +0,0 @@
|
||||
# 实现计划:ETL 全流程前后端联调(etl-fullstack-integration)
|
||||
|
||||
## 概述
|
||||
|
||||
基于 `admin-web-console` 已完成的前后端代码,进行端到端联调验证。通过后端 API 提交 api_full 全流程 ETL 任务(自定义窗口 2025-11-01~2026-02-20,30天切分,force-full,全选常用任务),实时监控执行过程,收集性能数据,最终生成综合报告。
|
||||
|
||||
## 任务
|
||||
|
||||
- [ ] 1. 服务启动与健康检查
|
||||
- [ ] 1.1 启动后端服务(`apps/backend/`,uvicorn :8000),确认 API 可达
|
||||
- 启动 `uvicorn app.main:app --host 0.0.0.0 --port 8000`,cwd 为 `apps/backend/`
|
||||
- 验证 `GET /api/tasks/flows` 返回 200
|
||||
- _Requirements: 1.1_
|
||||
|
||||
- [ ] 1.2 启动前端服务(`apps/admin-web/`,pnpm dev :5173),确认页面可访问
|
||||
- 启动 `pnpm dev`,cwd 为 `apps/admin-web/`
|
||||
- 验证 `http://localhost:5173` 可达
|
||||
- _Requirements: 1.2_
|
||||
|
||||
- [ ] 1.3 API 健康检查:登录获取 JWT,验证任务注册表,执行 sync-check
|
||||
- `POST /api/auth/login` 使用默认管理员账号(admin / admin123),获取 JWT
|
||||
- `GET /api/tasks/registry` 确认返回非空任务列表
|
||||
- `GET /api/tasks/sync-check` 确认 `in_sync=true`(后端注册表与 ETL 真实注册表一致)
|
||||
- 如果 sync-check 不一致,记录差异并向用户报告
|
||||
- _Requirements: 1.3, 1.4, 1.5_
|
||||
|
||||
- [ ] 2. 全流程任务提交
|
||||
- [ ] 2.1 构建 TaskConfig 并调用 validate 预览 CLI 命令
|
||||
- 构建 TaskConfig:flow=api_full, processing_mode=full_window, window_mode=custom, window_start="2025-11-01 00:00", window_end="2026-02-20 00:00", window_split=day, window_split_days=30, force_full=True, tasks=全选 is_common=True 的任务
|
||||
- 调用 `POST /api/tasks/validate` 验证配置有效
|
||||
- 记录生成的 CLI 命令预览,确认参数完整(--flow api_full --processing-mode full_window --window-start ... --window-end ... --window-split day --window-split-days 30 --force-full --store-id 1 --tasks ...)
|
||||
- _Requirements: 2.1_
|
||||
|
||||
- [ ] 2.2 提交任务执行(`POST /api/execution/run`),记录 execution_id
|
||||
- 调用 `POST /api/execution/run` 提交任务
|
||||
- 记录返回的 execution_id
|
||||
- 确认任务状态变为 running
|
||||
- _Requirements: 2.2, 2.4_
|
||||
|
||||
- [ ] 3. 执行监控与 DEBUG
|
||||
- [ ] 3.1 启动监控循环:每 30 秒轮询状态和日志,检测错误/警告,最长等待 30 分钟
|
||||
- 轮询 `GET /api/execution/queue` 检查任务状态
|
||||
- 轮询 `GET /api/execution/{id}/logs` 获取增量日志
|
||||
- 检测日志中的 ERROR / CRITICAL / Traceback / Exception / WARNING
|
||||
- 记录每次轮询的时间戳和日志增量行数
|
||||
- 如果连续 30 分钟无新日志输出,报告超时警告
|
||||
- 任务完成(success/failed/cancelled)时停止轮询
|
||||
- _Requirements: 3.1, 3.2, 3.3, 3.4, 3.5_
|
||||
|
||||
- [ ] 3.2 对执行过程中发现的错误/警告进行 DEBUG 分析
|
||||
- 收集所有 ERROR 和 WARNING 日志行及其上下文(前后各 5 行)
|
||||
- 分析错误类型:API 超时、数据库连接、数据质量、配置问题等
|
||||
- 如果任务失败,获取完整 stderr 并分析根因
|
||||
- 记录 DEBUG 发现到报告中
|
||||
- _Requirements: 3.2, 3.5_
|
||||
|
||||
- [ ] 4. 性能计时与报告生成
|
||||
- [ ] 4.1 解析执行日志,提取精细计时数据
|
||||
- 从日志中提取每个窗口切片(30天)的开始/结束时间
|
||||
- 计算每个切片的耗时
|
||||
- 识别 ODS / DWD / DWS / INDEX 各阶段的耗时
|
||||
- 标注 Top-5 耗时最长的阶段/任务
|
||||
- _Requirements: 4.1, 4.2, 4.3, 4.4_
|
||||
|
||||
- [ ] 4.2 生成综合联调报告,输出到 SYSTEM_LOG_ROOT
|
||||
- 报告包含:执行概要(参数、时间、退出码)
|
||||
- 报告包含:性能报告(各切片耗时对比、Top-5 瓶颈)
|
||||
- 报告包含:DEBUG 报告(如有错误/警告)
|
||||
- 输出路径:`{SYSTEM_LOG_ROOT}/{date}__etl_integration_report.md`
|
||||
- 路径通过 `SYSTEM_LOG_ROOT` 环境变量获取,缺失时报错
|
||||
- _Requirements: 5.1, 5.2, 5.3, 5.4_
|
||||
|
||||
- [ ] 5. 服务清理
|
||||
- [ ] 5.1 停止后端和前端服务,清理资源
|
||||
- 停止 uvicorn 进程
|
||||
- 停止 pnpm dev 进程
|
||||
- 报告联调完成状态
|
||||
|
||||
## 说明
|
||||
|
||||
- 本 Spec 为运维联调任务,不涉及新功能代码开发
|
||||
- 不编写属性测试或单元测试,联调本身即为集成验证
|
||||
- 全选常用任务 = 任务注册表中 `is_common=True` 的所有任务(共 41 个)
|
||||
- "全部门店":当前系统仅有 site_id=1(默认管理员绑定),如需多门店需逐个执行
|
||||
- 监控允许空闲等待,最长 30 分钟无新日志才报超时
|
||||
- 报告输出路径遵循 export-paths 规范,通过 SYSTEM_LOG_ROOT 环境变量获取
|
||||
1
.kiro/specs/miniapp-core-business/.config.kiro
Normal file
1
.kiro/specs/miniapp-core-business/.config.kiro
Normal file
@@ -0,0 +1 @@
|
||||
{"generationMode": "requirements-first"}
|
||||
189
.kiro/specs/miniapp-core-business/requirements.md
Normal file
189
.kiro/specs/miniapp-core-business/requirements.md
Normal file
@@ -0,0 +1,189 @@
|
||||
# 需求文档:核心业务层 — 任务系统 + 备注系统 + 触发器机制(miniapp-core-business)
|
||||
|
||||
## 简介
|
||||
|
||||
本 SPEC 实现小程序的核心业务逻辑层,涵盖助教任务生成与管理、备注系统、后台触发器/轮询机制。系统基于 P1(miniapp-db-foundation)已建立的 `biz` Schema、P2(etl-dws-miniapp-extensions)提供的指数数据(WBI/NCI/RS 等通过 FDW 读取)、P3(miniapp-auth-system)提供的用户认证和助教绑定信息,在 `test_zqyy_app.biz` 中创建任务、备注、触发器等业务表,并在 FastAPI 后端实现对应的服务层和 API 端点。
|
||||
|
||||
## 术语表
|
||||
|
||||
- **Task_System**:任务系统,负责任务生成、状态管理、完成检测的完整后端服务
|
||||
- **Task_Generator**:任务生成器,每日 04:00 后运行的定时任务,基于指数数据为每个助教分配任务
|
||||
- **Task_Expiry_Checker**:任务有效期检查器,每小时运行的轮询任务,将超过有效期的任务标记为无效
|
||||
- **Recall_Completion_Detector**:召回完成检测器,ETL 数据更新后运行,检测助教是否已为匹配客户提供服务
|
||||
- **Note_Reclassify_Service**:备注重分类服务,召回完成时触发,将普通备注回溯重分类为回访备注
|
||||
- **Note_System**:备注系统,负责备注的创建、查询、删除和类型管理
|
||||
- **Trigger_System**:触发器系统,统一管理所有条件触发的后台任务(cron/event/interval 三种触发方式)
|
||||
- **coach_task**:助教任务记录,存储在 `biz.coach_tasks` 表中
|
||||
- **task_type**:任务类型枚举,取值为 `high_priority_recall`(高优先召回)/ `priority_recall`(优先召回)/ `follow_up_visit`(客户回访)/ `relationship_building`(关系构建)
|
||||
- **task_status**:任务状态枚举,取值为 `active`(有效)/ `inactive`(无效)/ `completed`(已完成)/ `abandoned`(已放弃)
|
||||
- **note_type**:备注类型枚举,取值为 `normal`(普通备注)/ `follow_up`(回访备注)/ `birthday`(生日信息)/ `abandon_reason`(放弃原因)
|
||||
- **trigger_type**:触发器类型枚举,取值为 `cron`(定时触发)/ `event`(事件触发)/ `interval`(间隔触发)
|
||||
- **priority_score**:优先级分数,取 max(WBI, NCI) 的快照值
|
||||
- **expires_at**:任务有效期,默认为 NULL(无有效期),当指数不再满足条件时填充为 `created_at + 48h`
|
||||
- **WBI**:老客挽回指数(Winback Index),0-10 分,来自 `fdw_etl` 的 DWS 数据
|
||||
- **NCI**:新客转化指数(New Customer Index),0-10 分,来自 `fdw_etl` 的 DWS 数据
|
||||
- **RS**:关系强度指数(Relationship Strength),0-10 分,客户-助教配对维度
|
||||
- **site_id**:门店标识符,类型为 `BIGINT`,用于多门店数据隔离
|
||||
- **assistant_id**:助教标识符,来自 ETL 的 `dim_assistant`,通过 `auth.user_assistant_binding` 与小程序用户关联
|
||||
- **member_id**:会员标识符,来自 ETL 的 `dim_member`
|
||||
- **FDW**:`postgres_fdw` 外部数据包装器,通过 `fdw_etl` Schema 读取 ETL 库数据
|
||||
- **Migration_Script**:存放在 `db/zqyy_app/migrations/` 中的纯 SQL 迁移脚本,以日期前缀命名
|
||||
|
||||
## 需求
|
||||
|
||||
### 需求 1:业务数据表创建
|
||||
|
||||
**用户故事:** 作为后端开发者,我需要在 `biz` Schema 中创建任务、备注、触发器相关的数据表,以便支撑核心业务功能。
|
||||
|
||||
#### 验收标准
|
||||
|
||||
1. WHEN Migration_Script 执行完成, THE Task_System SHALL 在 `biz` Schema 中创建 `coach_tasks` 表,包含 `id`(SERIAL PK)、`site_id`(BIGINT NOT NULL)、`assistant_id`(BIGINT NOT NULL)、`member_id`(BIGINT NOT NULL)、`task_type`(VARCHAR NOT NULL)、`status`(VARCHAR NOT NULL DEFAULT 'active')、`priority_score`(NUMERIC(4,2))、`expires_at`(TIMESTAMPTZ,默认 NULL)、`is_pinned`(BOOLEAN DEFAULT FALSE)、`abandon_reason`(TEXT)、`completed_at`(TIMESTAMPTZ)、`completed_task_type`(VARCHAR)、`parent_task_id`(INTEGER,FK → coach_tasks.id)、`created_at`(TIMESTAMPTZ DEFAULT NOW())、`updated_at`(TIMESTAMPTZ DEFAULT NOW())字段
|
||||
2. WHEN Migration_Script 执行完成, THE Task_System SHALL 在 `biz` Schema 中创建 `coach_task_history` 表,包含 `id`(SERIAL PK)、`task_id`(INTEGER FK → coach_tasks.id)、`action`(VARCHAR NOT NULL)、`old_status`(VARCHAR)、`new_status`(VARCHAR)、`detail`(JSONB)、`created_at`(TIMESTAMPTZ DEFAULT NOW())字段
|
||||
3. WHEN Migration_Script 执行完成, THE Note_System SHALL 在 `biz` Schema 中创建 `notes` 表,包含 `id`(SERIAL PK)、`site_id`(BIGINT NOT NULL)、`author_user_id`(INTEGER NOT NULL)、`target_member_id`(BIGINT NOT NULL)、`target_assistant_id`(BIGINT)、`note_type`(VARCHAR NOT NULL DEFAULT 'normal')、`content`(TEXT NOT NULL)、`task_id`(INTEGER,FK → coach_tasks.id,可空)、`ai_score`(NUMERIC(3,1))、`ai_evaluation`(TEXT)、`created_at`(TIMESTAMPTZ DEFAULT NOW())、`updated_at`(TIMESTAMPTZ DEFAULT NOW())字段
|
||||
4. WHEN Migration_Script 执行完成, THE Trigger_System SHALL 在 `biz` Schema 中创建 `trigger_jobs` 表,包含 `id`(SERIAL PK)、`job_type`(VARCHAR NOT NULL)、`job_name`(VARCHAR NOT NULL)、`trigger_type`(VARCHAR NOT NULL)、`trigger_config`(JSONB NOT NULL)、`last_run_at`(TIMESTAMPTZ)、`next_run_at`(TIMESTAMPTZ)、`status`(VARCHAR NOT NULL DEFAULT 'enabled')、`created_at`(TIMESTAMPTZ DEFAULT NOW())字段
|
||||
5. WHEN Migration_Script 执行完成, THE Trigger_System SHALL 在 `biz` Schema 中创建 `trigger_execution_log` 表,包含 `id`(SERIAL PK)、`job_id`(INTEGER FK → trigger_jobs.id)、`started_at`(TIMESTAMPTZ NOT NULL)、`finished_at`(TIMESTAMPTZ)、`status`(VARCHAR NOT NULL)、`result_summary`(JSONB)、`error_message`(TEXT)字段
|
||||
6. THE Migration_Script SHALL 对 `coach_tasks` 表创建索引:`(site_id, assistant_id, status)` 复合索引、`(site_id, member_id)` 复合索引、`(status, expires_at)` 复合索引用于有效期轮询
|
||||
7. THE Migration_Script SHALL 对 `notes` 表创建索引:`(site_id, target_member_id)` 复合索引、`(author_user_id)` 索引、`(task_id)` 索引
|
||||
8. THE Migration_Script SHALL 使用 `IF NOT EXISTS` / `OR REPLACE` 等幂等语法,确保重复执行不会报错
|
||||
9. THE Migration_Script SHALL 在脚本中包含回滚语句(以注释形式)
|
||||
|
||||
### 需求 2:触发器种子数据预置
|
||||
|
||||
**用户故事:** 作为系统管理员,我需要系统预置 4 个核心触发器配置,以便后台任务按预定规则自动运行。
|
||||
|
||||
#### 验收标准
|
||||
|
||||
1. WHEN 种子数据脚本执行完成, THE Trigger_System SHALL 在 `biz.trigger_jobs` 表中插入 `task_generator` 记录(trigger_type=`cron`,trigger_config 包含 cron 表达式 `0 4 * * *`,表示每日 04:00)
|
||||
2. WHEN 种子数据脚本执行完成, THE Trigger_System SHALL 在 `biz.trigger_jobs` 表中插入 `task_expiry_check` 记录(trigger_type=`interval`,trigger_config 包含间隔秒数 3600,表示每小时)
|
||||
3. WHEN 种子数据脚本执行完成, THE Trigger_System SHALL 在 `biz.trigger_jobs` 表中插入 `recall_completion_check` 记录(trigger_type=`event`,trigger_config 包含事件名 `etl_data_updated`)
|
||||
4. WHEN 种子数据脚本执行完成, THE Trigger_System SHALL 在 `biz.trigger_jobs` 表中插入 `note_reclassify_backfill` 记录(trigger_type=`event`,trigger_config 包含事件名 `recall_completed`)
|
||||
5. THE 种子数据脚本 SHALL 使用 `ON CONFLICT DO NOTHING` 语法,确保重复执行不会产生重复数据
|
||||
|
||||
### 需求 3:任务生成器
|
||||
|
||||
**用户故事:** 作为助教,我每天打开小程序能看到系统为我分配的任务列表,按优先级排序,以便我知道今天应该优先联系哪些客户。
|
||||
|
||||
#### 验收标准
|
||||
|
||||
1. WHEN Task_Generator 运行时, THE Task_System SHALL 从 `fdw_etl` 读取每个助教关联的客户的 WBI、NCI、RS 指数数据
|
||||
2. WHEN 某客户的 max(WBI, NCI) > 7, THE Task_Generator SHALL 为该客户-助教配对生成 `high_priority_recall` 类型任务,priority_score 记录为 max(WBI, NCI) 的快照值
|
||||
3. WHEN 某客户的 max(WBI, NCI) > 5 且 ≤ 7, THE Task_Generator SHALL 为该客户-助教配对生成 `priority_recall` 类型任务
|
||||
4. WHEN 某助教已完成某客户的召回任务且该客户无回访备注, THE Task_Generator SHALL 为该客户-助教配对生成 `follow_up_visit` 类型任务
|
||||
5. WHEN 某客户-助教配对的 RS < 6, THE Task_Generator SHALL 为该客户-助教配对生成 `relationship_building` 类型任务
|
||||
6. WHEN Task_Generator 生成任务时发现已存在相同 `(site_id, assistant_id, member_id, task_type)` 且 status 为 `active` 的任务, THE Task_Generator SHALL 跳过该任务,不创建新记录
|
||||
7. WHEN Task_Generator 生成任务时发现已存在相同 `(site_id, assistant_id, member_id)` 但 task_type 不同且 status 为 `active` 的任务, THE Task_Generator SHALL 将旧任务的 status 更新为 `inactive`,创建新类型的任务,并将新任务的 `parent_task_id` 指向旧任务
|
||||
8. WHEN Task_Generator 将旧任务标记为 `inactive`(因类型变更), THE Task_System SHALL 将旧任务的 `expires_at` 保持为 NULL(类型变更导致的无效不设有效期)
|
||||
9. WHEN 任务类型按优先级排序时, THE Task_Generator SHALL 按以下优先级从高到低处理:`high_priority_recall`(0)> `priority_recall`(0)> `follow_up_visit`(1)> `relationship_building`(2),同优先级内按 priority_score 降序排列
|
||||
10. WHEN Task_Generator 运行完成, THE Task_System SHALL 在 `biz.coach_task_history` 中记录本次生成的所有操作(创建、跳过、类型变更)
|
||||
|
||||
### 需求 4:48 小时滞留机制
|
||||
|
||||
**用户故事:** 作为系统,回访任务至少保留 48 小时,到期后自动失效,以便助教有足够时间完成回访。
|
||||
|
||||
#### 验收标准
|
||||
|
||||
1. WHEN Task_Generator 计算出某客户不再满足当前任务条件(指数变化)但该任务仍为 `active` 且 `expires_at` 为 NULL, THE Task_System SHALL 将该任务的 `expires_at` 填充为 `created_at + 48 小时`,status 保持 `active`
|
||||
2. WHEN Task_Expiry_Checker 每小时运行时, THE Task_System SHALL 查询所有 status 为 `active` 且 `expires_at` 不为 NULL 且 `expires_at` < 当前时间的任务,将其 status 更新为 `inactive`
|
||||
3. WHEN 一个已有 `expires_at` 的 `follow_up_visit` 任务存在,且 Task_Generator 再次生成同客户-助教的 `follow_up_visit` 任务, THE Task_System SHALL 将旧任务标记为 `inactive`,创建新的 `follow_up_visit` 任务(新任务顶替旧任务)
|
||||
4. WHEN 任务状态变更时, THE Task_System SHALL 在 `biz.coach_task_history` 中记录变更详情(包含旧状态、新状态、变更原因)
|
||||
|
||||
### 需求 5:召回完成检测
|
||||
|
||||
**用户故事:** 作为助教,我完成召回任务后(客户到店被我服务),系统自动标记任务完成,无需手动操作。
|
||||
|
||||
#### 验收标准
|
||||
|
||||
1. WHEN ETL 数据更新后 Recall_Completion_Detector 运行时, THE Task_System SHALL 查询 `fdw_etl` 中的服务记录(`v_dwd_session_detail` 或等效视图),检测是否有新的助教-客户服务记录
|
||||
2. WHEN 检测到助教 A 为客户 B 提供了新的服务记录, THE Task_System SHALL 查找助教 A 对客户 B 的所有 status 为 `active` 的任务(无论任务类型),将其 status 更新为 `completed`
|
||||
3. WHEN 任务被标记为 `completed`, THE Task_System SHALL 记录 `completed_at` 为当前时间,`completed_task_type` 为任务完成时的 task_type 快照
|
||||
4. WHEN 召回完成后发现该客户-助教配对无回访备注, THE Task_System SHALL 触发 `note_reclassify_backfill` 事件,检查是否需要数据回溯
|
||||
|
||||
### 需求 6:数据回溯机制
|
||||
|
||||
**用户故事:** 作为系统,当 ETL 数据延迟导致召回完成晚于备注提交时,需要回溯重分类备注,确保回访任务正确完成。
|
||||
|
||||
#### 验收标准
|
||||
|
||||
1. WHEN 召回任务完成时, THE Note_Reclassify_Service SHALL 查询该助教在召回服务结束时间之后为该客户添加的所有 note_type 为 `normal` 的备注
|
||||
2. WHEN 找到符合条件的普通备注, THE Note_Reclassify_Service SHALL 将第一条(按时间最早)普通备注的 note_type 更新为 `follow_up`,并关联到对应的回访任务(设置 `task_id`)
|
||||
3. WHEN 备注被重分类为 `follow_up` 后, THE Note_Reclassify_Service SHALL 触发 AI 应用 6 的含金量评分流程(评分结果写入 `notes.ai_score` 和 `notes.ai_evaluation`)
|
||||
4. WHEN AI 评分结果 ≥ 6 分, THE Task_System SHALL 将对应的回访任务标记为 `completed`
|
||||
5. WHEN AI 评分结果 < 6 分, THE Task_System SHALL 保持回访任务的当前状态不变,等待助教提交新的备注
|
||||
|
||||
### 需求 7:任务操作 API
|
||||
|
||||
**用户故事:** 作为助教,我可以查看任务列表、置顶/放弃任务、取消置顶/取消放弃,以便灵活管理我的工作优先级。
|
||||
|
||||
#### 验收标准
|
||||
|
||||
1. WHEN 助教请求任务列表, THE Task_System SHALL 返回该助教在当前 `site_id` 下所有 status 为 `active` 的任务,按优先级分组(priority 0 → 1 → 2),同优先级内按 `is_pinned` 降序、`priority_score` 降序排列
|
||||
2. WHEN 助教置顶某任务, THE Task_System SHALL 将该任务的 `is_pinned` 更新为 TRUE,并在 `coach_task_history` 中记录操作
|
||||
3. WHEN 助教取消置顶某任务, THE Task_System SHALL 将该任务的 `is_pinned` 更新为 FALSE,并在 `coach_task_history` 中记录操作
|
||||
4. WHEN 助教放弃某任务且提供了放弃原因, THE Task_System SHALL 将该任务的 status 更新为 `abandoned`,记录 `abandon_reason`,并在 `coach_task_history` 中记录操作
|
||||
5. IF 助教放弃任务时未提供放弃原因(空字符串或纯空白字符), THEN THE Task_System SHALL 返回 HTTP 422 错误,拒绝操作
|
||||
6. WHEN 助教取消放弃某任务, THE Task_System SHALL 将该任务的 status 恢复为 `active`,清空 `abandon_reason`,并在 `coach_task_history` 中记录操作
|
||||
7. WHEN 助教请求任务详情, THE Task_System SHALL 返回任务的完整信息,包含客户基本信息(通过 FDW 查询)、指数快照、备注列表、近期服务记录
|
||||
8. WHEN 助教请求已放弃任务列表, THE Task_System SHALL 返回该助教在当前 `site_id` 下所有 status 为 `abandoned` 的任务列表
|
||||
|
||||
### 需求 8:备注 CRUD
|
||||
|
||||
**用户故事:** 作为助教,我可以为客户添加、查看、删除备注,以便记录客户信息和服务跟进情况。
|
||||
|
||||
#### 验收标准
|
||||
|
||||
1. WHEN 助教为某客户创建备注(提供 content 和可选的 note_type), THE Note_System SHALL 在 `biz.notes` 表中创建记录,`author_user_id` 为当前登录用户 ID,`site_id` 为当前店铺 ID
|
||||
2. WHEN 助教在回访任务上下文中创建备注, THE Note_System SHALL 将 note_type 设置为 `follow_up`,并将 `task_id` 关联到对应的回访任务
|
||||
3. WHEN 回访备注创建后, THE Note_System SHALL 触发 AI 应用 6 的含金量评分流程,将评分结果写入 `notes.ai_score` 和 `notes.ai_evaluation`
|
||||
4. WHEN AI 评分结果 ≥ 6 分, THE Task_System SHALL 将对应的回访任务标记为 `completed`
|
||||
5. WHEN AI 评分结果 < 6 分, THE Task_System SHALL 保持回访任务状态不变
|
||||
6. WHEN 助教查询某客户的备注列表, THE Note_System SHALL 返回该客户在当前 `site_id` 下的所有备注,按 `created_at` 降序排列
|
||||
7. WHEN 助教删除某条备注, THE Note_System SHALL 从 `biz.notes` 表中删除该记录
|
||||
8. IF 助教创建备注时 content 为空字符串或纯空白字符, THEN THE Note_System SHALL 返回 HTTP 422 错误,拒绝创建
|
||||
|
||||
### 需求 9:生日信息隔离存储
|
||||
|
||||
**用户故事:** 作为助教,我为客户记录的生日信息独立于 ETL 数据,不会被 ETL 数据更新覆盖。
|
||||
|
||||
#### 验收标准
|
||||
|
||||
1. WHEN 助教为某客户添加生日备注(note_type=`birthday`), THE Note_System SHALL 在 `biz.notes` 表中创建记录,content 存储生日值
|
||||
2. THE Note_System SHALL 确保 note_type 为 `birthday` 的备注记录独立于 ETL 数据管道,ETL 数据更新不会修改或删除这些记录
|
||||
3. WHEN 查询某客户的生日信息时, THE Note_System SHALL 从 `biz.notes` 表中查询 note_type 为 `birthday` 的最新记录,返回 content(生日值)和 `author_user_id`(记录者)
|
||||
|
||||
### 需求 10:触发器调度框架
|
||||
|
||||
**用户故事:** 作为系统,我需要一个统一的触发器调度框架来管理所有后台任务的触发和执行。
|
||||
|
||||
#### 验收标准
|
||||
|
||||
1. THE Trigger_System SHALL 支持三种触发方式:`cron`(基于 cron 表达式的定时触发)、`event`(基于事件名的事件触发)、`interval`(基于固定间隔秒数的间隔触发)
|
||||
2. WHEN cron 类型触发器到达触发时间, THE Trigger_System SHALL 执行对应的任务处理函数,并更新 `last_run_at` 和 `next_run_at`
|
||||
3. WHEN event 类型触发器收到匹配的事件通知, THE Trigger_System SHALL 执行对应的任务处理函数,并更新 `last_run_at`
|
||||
4. WHEN interval 类型触发器距上次运行超过配置的间隔秒数, THE Trigger_System SHALL 执行对应的任务处理函数,并更新 `last_run_at` 和 `next_run_at`
|
||||
5. WHEN 触发器执行完成(成功或失败), THE Trigger_System SHALL 在 `biz.trigger_execution_log` 中记录执行日志,包含开始时间、结束时间、执行状态、结果摘要或错误信息
|
||||
6. WHEN 触发器 status 为 `disabled`, THE Trigger_System SHALL 跳过该触发器的执行
|
||||
7. IF 触发器执行过程中发生异常, THEN THE Trigger_System SHALL 捕获异常,在执行日志中记录错误信息,触发器状态保持 `enabled`(不因单次失败而禁用)
|
||||
|
||||
### 需求 11:迁移脚本管理
|
||||
|
||||
**用户故事:** 作为后端开发者,我需要所有数据库变更都有对应的迁移脚本,以便变更可追溯、可重放。
|
||||
|
||||
#### 验收标准
|
||||
|
||||
1. THE Migration_Script SHALL 将所有业务相关表的 DDL 存放在 `db/zqyy_app/migrations/` 目录中
|
||||
2. THE Migration_Script SHALL 使用日期前缀命名(格式:`YYYY-MM-DD__<描述>.sql`)
|
||||
3. THE Migration_Script SHALL 使用 UTF-8 编码,纯 SQL(非 ORM)
|
||||
4. THE Migration_Script SHALL 在每个脚本中包含回滚语句(以注释形式)
|
||||
5. THE Migration_Script SHALL 使用幂等语法(`IF NOT EXISTS`、`ON CONFLICT DO NOTHING`),确保重复执行不会报错
|
||||
|
||||
### 需求 12:DDL 测试库落库与文档同步
|
||||
|
||||
**用户故事:** 作为后端开发者,我需要所有 DDL 变更在测试库(`test_zqyy_app`)中实际执行验证,并同步更新数据库手册和 DDL 基线,确保文档与实际 Schema 一致。
|
||||
|
||||
#### 验收标准
|
||||
|
||||
1. WHEN 迁移脚本编写完成, THE Task_System SHALL 在 `test_zqyy_app` 测试库中执行独立的迁移脚本(`db/zqyy_app/migrations/` 下的 SQL 文件)进行落库,验证无错误
|
||||
2. WHEN 迁移脚本执行成功, THE Task_System SHALL 创建或更新 `biz` Schema 相关的数据库手册文档,包含变更说明、兼容性影响、回滚策略、验证 SQL(至少 3 条)
|
||||
3. WHEN 迁移脚本执行成功, THE Task_System SHALL 运行 `python scripts/ops/gen_consolidated_ddl.py` 将新表的 DDL 合并到主 DDL 基线文件中
|
||||
4. WHEN 种子数据脚本执行成功, THE Task_System SHALL 在数据库手册中记录种子数据内容(触发器配置)
|
||||
5. THE Task_System SHALL 同步更新所有相关文档(包括数据库手册、DDL 基线、部署文档等),确保文档与实际 Schema 保持一致
|
||||
@@ -96,29 +96,29 @@
|
||||
- `default_registry.register("DWS_SPENDING_POWER_INDEX", SpendingPowerIndexTask, requires_db_config=False, layer="INDEX", depends_on=["DWS_MEMBER_CONSUMPTION"])`
|
||||
- _Requirements: 9.1, 9.2_
|
||||
|
||||
- [-] 6. 配置种子数据
|
||||
- [x] 6. 配置种子数据
|
||||
- [x] 6.1 在 `db/etl_feiqiu/seeds/seed_index_parameters.sql` 中追加 SPI 参数
|
||||
- 插入 `index_type='SPI'` 的全部参数行(窗口、基数、权重、映射、稳定性)
|
||||
- 金额压缩基数使用合理初始默认值
|
||||
- _Requirements: 7.1, 7.4, 8.5_
|
||||
- [~] 6.2 在测试库执行种子数据脚本
|
||||
- [x] 6.2 在测试库执行种子数据脚本
|
||||
- 通过 TEST_DB_DSN 连接测试库执行 INSERT
|
||||
- _Requirements: 7.1_
|
||||
|
||||
- [~] 7. 检查点 - 确保单元测试和集成验证通过
|
||||
- [x] 7. 检查点 - 确保单元测试和集成验证通过
|
||||
- 运行 `cd apps/etl/connectors/feiqiu && pytest tests/unit/test_spi_task.py -v`
|
||||
- 确保所有测试通过,如有问题请询问用户。
|
||||
|
||||
- [ ] 8. 文档更新
|
||||
- [~] 8.1 编写数据库手册 `docs/database/BD_Manual_dws_member_spending_power_index.md`
|
||||
- [x] 8. 文档更新
|
||||
- [x] 8.1 编写数据库手册 `docs/database/BD_Manual_dws_member_spending_power_index.md`
|
||||
- 包含表结构、字段说明、索引、验证 SQL(至少 3 条)、兼容性说明、回滚策略
|
||||
- _Requirements: 11.1_
|
||||
- [~] 8.2 更新 ETL 任务文档 `apps/etl/connectors/feiqiu/docs/etl_tasks/index_tasks.md`
|
||||
- [x] 8.2 更新 ETL 任务文档 `apps/etl/connectors/feiqiu/docs/etl_tasks/index_tasks.md`
|
||||
- 新增 DWS_SPENDING_POWER_INDEX 章节,包含算法公式、参数清单、数据来源、计算流程
|
||||
- 更新概述表格和继承体系图
|
||||
- _Requirements: 11.2, 11.3_
|
||||
|
||||
- [~] 9. 最终检查点 - 确保所有测试通过
|
||||
- [x] 9. 最终检查点 - 确保所有测试通过
|
||||
- 运行属性测试:`cd C:\NeoZQYY && pytest tests/test_spi_properties.py -v`
|
||||
- 运行单元测试:`cd apps/etl/connectors/feiqiu && pytest tests/unit/test_spi_task.py -v`
|
||||
- 确保所有测试通过,如有问题请询问用户。
|
||||
|
||||
35
.kiro/steering/doc-map.md
Normal file
35
.kiro/steering/doc-map.md
Normal file
@@ -0,0 +1,35 @@
|
||||
---
|
||||
inclusion: manual
|
||||
name: doc-map
|
||||
description: 项目文档地图索引。需要定位文档、理解项目结构、查找规范时手动加载。
|
||||
---
|
||||
|
||||
# 文档地图索引
|
||||
|
||||
完整文档地图:`#[[file:docs/DOCUMENTATION-MAP.md]]`
|
||||
|
||||
## 快速定位
|
||||
|
||||
| 需要什么 | 去哪里找 |
|
||||
|---------|---------|
|
||||
| DB 变更审计模板 | `docs/database/BD_Manual_*.md` |
|
||||
| API 端点参考 | `apps/backend/docs/API-REFERENCE.md` |
|
||||
| ETL 任务说明 | `apps/etl/connectors/feiqiu/docs/etl_tasks/` |
|
||||
| ETL 业务规则 | `apps/etl/connectors/feiqiu/docs/business-rules/` |
|
||||
| 输出路径规范 | `docs/deployment/EXPORT-PATHS.md` |
|
||||
| 上线检查清单 | `docs/deployment/LAUNCH-CHECKLIST.md` |
|
||||
| 变更审计记录 | `docs/audit/changes/` |
|
||||
| PRD / Spec 拆分 | `docs/prd/specs/` |
|
||||
| 小程序 UI 原型 | `docs/h5_ui/pages/` |
|
||||
| 迁移脚本 | `db/etl_feiqiu/migrations/` + `db/zqyy_app/migrations/` |
|
||||
| DDL 基线 | `docs/database/ddl/` |
|
||||
| 模块 README | 各 `apps/*/README.md` + `packages/shared/README.md` + `db/README.md` |
|
||||
| Kiro Spec | `.kiro/specs/<spec-name>/` (requirements + design + tasks) |
|
||||
|
||||
## 行为规范提示
|
||||
|
||||
- 新增 DB 表/字段 → 必须写 `BD_Manual_*.md`(见 `db-docs.md` steering)
|
||||
- 新增输出路径 → 先加 `.env` 变量,再更新 `EXPORT-PATHS.md`(见 `export-paths.md` steering)
|
||||
- 逻辑改动 → 检查是否命中审计条件(见 `governance.md` steering)
|
||||
- 新增/修改 API → 同步更新 `API-REFERENCE.md`
|
||||
- 新增 ETL 任务 → 同步更新 `docs/etl_tasks/` 对应文档
|
||||
38
README.md
38
README.md
@@ -4,19 +4,19 @@
|
||||
|
||||
## 项目结构
|
||||
|
||||
| 目录 | 说明 |
|
||||
|------|------|
|
||||
| apps/etl/connectors/feiqiu/ | 飞球 Connector(数据源连接器) |
|
||||
| apps/backend/ | FastAPI 后端(小程序 API) |
|
||||
| apps/miniprogram/ | 微信小程序(Donut + TDesign) |
|
||||
| apps/admin-web/ | 管理后台(React + Vite + Ant Design) |
|
||||
| packages/shared/ | 跨项目共享包(枚举、金额精度、时间工具) |
|
||||
| db/ | 数据库 DDL、迁移、种子脚本 |
|
||||
| docs/ | 文档(PRD、契约、权限矩阵、架构等) |
|
||||
| infra/ | 基础设施配置 |
|
||||
| scripts/ | 运维/工具脚本 |
|
||||
| samples/ | 示例数据与配置 |
|
||||
| tests/ | Monorepo 级属性测试 |
|
||||
| 目录 | 说明 | 文档 |
|
||||
|------|------|------|
|
||||
| `apps/etl/connectors/feiqiu/` | 飞球 Connector(API → ODS → DWD → DWS) | [docs/](apps/etl/connectors/feiqiu/docs/) |
|
||||
| `apps/backend/` | FastAPI 后端(13 个路由 · JWT 双认证 · WebSocket) | [README](apps/backend/README.md) · [API 参考](apps/backend/docs/API-REFERENCE.md) |
|
||||
| `apps/miniprogram/` | 微信小程序(Donut + TDesign + TypeScript) | [README](apps/miniprogram/README.md) |
|
||||
| `apps/admin-web/` | 管理后台(React + Vite + Ant Design) | [README](apps/admin-web/README.md) |
|
||||
| `apps/mcp-server/` | MCP Server(AI 工具集成,PostgreSQL 只读查询) | [README](apps/mcp-server/README.md) |
|
||||
| `packages/shared/` | 共享包(枚举、金额精度、时间工具) | [README](packages/shared/README.md) |
|
||||
| `db/etl_feiqiu/` | ETL 数据库(6 层 Schema:meta/ods/dwd/core/dws/app) | [README](db/etl_feiqiu/README.md) |
|
||||
| `db/zqyy_app/` | 业务数据库(auth 认证 + 任务队列 + 调度) | [README](db/zqyy_app/README.md) |
|
||||
| `docs/` | 项目文档中心(架构、数据库、部署、审计) | [README](docs/README.md) |
|
||||
| `scripts/` | 运维/工具脚本 | — |
|
||||
| `tests/` | Monorepo 级属性测试 | — |
|
||||
|
||||
## 快速开始
|
||||
|
||||
@@ -45,8 +45,10 @@ pytest tests/unit
|
||||
|
||||
## 技术栈
|
||||
|
||||
- Python 3.10+, uv workspace
|
||||
- PostgreSQL(六层 Schema:meta/ods/dwd/core/dws/app)
|
||||
- FastAPI + uvicorn
|
||||
- React + Vite + Ant Design(管理后台)
|
||||
- Donut + TDesign(微信小程序)
|
||||
- Python 3.10+, uv workspace(4 个成员:etl、backend、mcp-server、shared)
|
||||
- PostgreSQL 四库:`etl_feiqiu` / `test_etl_feiqiu`(ETL)、`zqyy_app` / `test_zqyy_app`(业务)
|
||||
- FastAPI + Uvicorn(后端 API)
|
||||
- React 19 + Vite 6 + Ant Design 5(管理后台,独立 pnpm)
|
||||
- 微信小程序原生 + Donut + TDesign + TypeScript(C 端)
|
||||
- JWT 双认证(管理后台用户名密码 + 小程序微信登录)
|
||||
- 多门店隔离(`site_id` + RLS)
|
||||
|
||||
155
apps/admin-web/README.md
Normal file
155
apps/admin-web/README.md
Normal file
@@ -0,0 +1,155 @@
|
||||
# apps/admin-web — 管理后台
|
||||
|
||||
基于 React + Vite + Ant Design 构建的 ETL 管理后台,提供任务管理、调度配置、数据查看、ETL 监控和运维控制功能。
|
||||
|
||||
## 技术栈
|
||||
|
||||
- React 19 + TypeScript
|
||||
- Vite 6(构建工具)
|
||||
- Ant Design 5(UI 组件库)
|
||||
- Zustand 5(状态管理)
|
||||
- React Router DOM 7(路由)
|
||||
- Axios(HTTP 客户端)
|
||||
- Day.js(日期处理)
|
||||
|
||||
## 目录结构
|
||||
|
||||
```
|
||||
apps/admin-web/
|
||||
├── src/
|
||||
│ ├── App.tsx # 主布局 + 路由配置 + 路由守卫
|
||||
│ ├── main.tsx # 应用入口
|
||||
│ ├── pages/ # 8 个功能页面
|
||||
│ │ ├── Login.tsx # 登录页
|
||||
│ │ ├── TaskConfig.tsx # 任务配置(Flow 选择 + 任务勾选 + 参数设置)
|
||||
│ │ ├── TaskManager.tsx # 任务管理(队列 + 执行历史 + 实时日志)
|
||||
│ │ ├── ETLStatus.tsx # ETL 状态(游标监控 + 最近执行)
|
||||
│ │ ├── DBViewer.tsx # 数据库查看器(Schema/表/列浏览 + SQL 执行)
|
||||
│ │ ├── LogViewer.tsx # 日志查看器
|
||||
│ │ ├── EnvConfig.tsx # 环境配置管理
|
||||
│ │ └── OpsPanel.tsx # 运维面板(服务启停 + Git + 系统监控)
|
||||
│ ├── components/ # 可复用组件
|
||||
│ │ ├── DwdTableSelector.tsx # DWD 表选择器
|
||||
│ │ ├── ErrorBoundary.tsx # 错误边界
|
||||
│ │ ├── LogStream.tsx # 实时日志流组件
|
||||
│ │ ├── ScheduleTab.tsx # 调度配置标签页
|
||||
│ │ └── TaskSelector.tsx # 任务选择器
|
||||
│ ├── api/ # API 调用层
|
||||
│ │ ├── client.ts # Axios 实例(baseURL + JWT 拦截器)
|
||||
│ │ ├── tasks.ts # 任务配置 API
|
||||
│ │ ├── execution.ts # 任务执行 API
|
||||
│ │ ├── schedules.ts # 调度管理 API
|
||||
│ │ ├── etlStatus.ts # ETL 状态 API
|
||||
│ │ ├── dbViewer.ts # 数据库查看器 API
|
||||
│ │ ├── envConfig.ts # 环境配置 API
|
||||
│ │ └── opsPanel.ts # 运维面板 API
|
||||
│ ├── store/
|
||||
│ │ └── authStore.ts # Zustand 认证状态(JWT 持久化 + hydrate)
|
||||
│ └── types/ # TypeScript 类型定义
|
||||
├── index.html # HTML 入口
|
||||
├── vite.config.ts # Vite 配置
|
||||
├── tsconfig.json # TypeScript 配置
|
||||
└── package.json # 依赖声明
|
||||
```
|
||||
|
||||
## 启动
|
||||
|
||||
```bash
|
||||
cd apps/admin-web
|
||||
pnpm install
|
||||
pnpm dev # 启动开发服务器(默认 http://localhost:5173)
|
||||
pnpm build # 生产构建
|
||||
pnpm preview # 预览生产构建
|
||||
```
|
||||
|
||||
## 页面功能
|
||||
|
||||
### 登录页 (`/login`)
|
||||
用户名密码登录,JWT 令牌存储在 localStorage,通过 Zustand `authStore` 管理。
|
||||
|
||||
### 任务配置 (`/`)
|
||||
ETL 任务的核心配置界面:
|
||||
- 选择执行流程(7 种 Flow)
|
||||
- 勾选要执行的任务(按业务域分组)
|
||||
- 设置处理模式(增量/校验/全窗口)
|
||||
- 配置时间窗口参数
|
||||
- 实时预览生成的 CLI 命令
|
||||
- 一键执行或加入队列
|
||||
|
||||
### 任务管理 (`/task-manager`)
|
||||
- 查看执行队列(拖拽排序、删除、取消)
|
||||
- 执行历史列表(状态、耗时、退出码)
|
||||
- 实时日志流(WebSocket 推送)
|
||||
|
||||
### ETL 状态 (`/etl-status`)
|
||||
- 各 ODS 表的数据游标(最后抓取时间、记录数)
|
||||
- 最近 50 条执行记录
|
||||
|
||||
### 数据库查看器 (`/db-viewer`)
|
||||
- 浏览 ETL 数据库 Schema → 表 → 列结构
|
||||
- 执行只读 SQL 查询(带安全限制)
|
||||
- 结果表格展示
|
||||
|
||||
### 日志查看器 (`/log-viewer`)
|
||||
查看历史执行的完整日志输出。
|
||||
|
||||
### 环境配置 (`/env-config`)
|
||||
查看和编辑根 `.env` 文件中的配置项(敏感值脱敏显示)。
|
||||
|
||||
### 运维面板 (`/ops-panel`)
|
||||
- 服务状态监控(test/prod 环境的 PID、内存、CPU)
|
||||
- 服务启停控制(启动/停止/重启)
|
||||
- Git 状态查看和 pull 操作
|
||||
- 依赖同步(`uv sync`)
|
||||
- 系统资源概况(CPU、内存、磁盘)
|
||||
|
||||
## 认证与路由守卫
|
||||
|
||||
- 所有功能页面通过 `PrivateRoute` 组件保护
|
||||
- 未登录自动重定向到 `/login`
|
||||
- JWT 令牌通过 Axios 拦截器自动附加到请求头
|
||||
- 应用启动时通过 `hydrate()` 从 localStorage 恢复认证状态
|
||||
|
||||
## API 层
|
||||
|
||||
`src/api/client.ts` 创建 Axios 实例,配置:
|
||||
- `baseURL`:默认 `http://localhost:8000`
|
||||
- 请求拦截器:自动附加 `Authorization: Bearer <token>`
|
||||
- 响应拦截器:401 时自动清除认证状态并跳转登录
|
||||
|
||||
各 API 模块对应后端路由,提供类型安全的调用接口。
|
||||
|
||||
## 状态管理
|
||||
|
||||
使用 Zustand 管理全局认证状态:
|
||||
- `isAuthenticated`:是否已登录
|
||||
- `token` / `refreshToken`:JWT 令牌
|
||||
- `login()` / `logout()` / `hydrate()`:状态操作
|
||||
|
||||
## 与后端的关系
|
||||
|
||||
管理后台通过 REST API 与 `apps/backend/` 通信:
|
||||
- 开发环境:Vite 代理到 `http://localhost:8000`
|
||||
- 生产环境:通过 `CORS_ORIGINS` 配置跨域
|
||||
|
||||
## 依赖
|
||||
|
||||
```json
|
||||
{
|
||||
"react": "^19.1.0",
|
||||
"react-dom": "^19.1.0",
|
||||
"react-router-dom": "^7.6.1",
|
||||
"antd": "^5.24.7",
|
||||
"axios": "^1.9.0",
|
||||
"zustand": "^5.0.5",
|
||||
"dayjs": "^1.11.19"
|
||||
}
|
||||
```
|
||||
|
||||
## Roadmap
|
||||
|
||||
- [ ] 用户申请审批界面(对接 `/api/xcx-auth` 审批端点)
|
||||
- [ ] 数据看板页面(助教业绩、财务日报、客户分析)
|
||||
- [ ] 权限管理界面(角色/权限配置)
|
||||
- [ ] 暗色主题支持
|
||||
- [ ] 国际化(i18n)
|
||||
@@ -5,9 +5,15 @@ FastAPI 依赖注入:从 JWT 提取当前用户信息。
|
||||
@router.get("/protected")
|
||||
async def protected_endpoint(user: CurrentUser = Depends(get_current_user)):
|
||||
print(user.user_id, user.site_id)
|
||||
|
||||
# 允许 pending 用户(受限令牌)访问
|
||||
@router.get("/apply")
|
||||
async def apply_endpoint(user: CurrentUser = Depends(get_current_user_or_limited)):
|
||||
if user.limited:
|
||||
... # 受限逻辑
|
||||
"""
|
||||
|
||||
from dataclasses import dataclass
|
||||
from dataclasses import dataclass, field
|
||||
|
||||
from fastapi import Depends, HTTPException, status
|
||||
from fastapi.security import HTTPAuthorizationCredentials, HTTPBearer
|
||||
@@ -24,7 +30,10 @@ class CurrentUser:
|
||||
"""从 JWT 解析出的当前用户上下文。"""
|
||||
|
||||
user_id: int
|
||||
site_id: int
|
||||
site_id: int = 0
|
||||
roles: list[str] = field(default_factory=list)
|
||||
status: str = "pending"
|
||||
limited: bool = False
|
||||
|
||||
|
||||
async def get_current_user(
|
||||
@@ -33,7 +42,7 @@ async def get_current_user(
|
||||
"""
|
||||
FastAPI 依赖:从 Authorization header 提取 JWT,验证后返回用户信息。
|
||||
|
||||
失败时抛出 401。
|
||||
要求完整令牌(非 limited),失败时抛出 401。
|
||||
"""
|
||||
token = credentials.credentials
|
||||
try:
|
||||
@@ -45,6 +54,14 @@ async def get_current_user(
|
||||
headers={"WWW-Authenticate": "Bearer"},
|
||||
)
|
||||
|
||||
# 受限令牌不允许通过此依赖
|
||||
if payload.get("limited"):
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_401_UNAUTHORIZED,
|
||||
detail="受限令牌无法访问此端点",
|
||||
headers={"WWW-Authenticate": "Bearer"},
|
||||
)
|
||||
|
||||
user_id_raw = payload.get("sub")
|
||||
site_id = payload.get("site_id")
|
||||
|
||||
@@ -64,4 +81,78 @@ async def get_current_user(
|
||||
headers={"WWW-Authenticate": "Bearer"},
|
||||
)
|
||||
|
||||
return CurrentUser(user_id=user_id, site_id=site_id)
|
||||
roles = payload.get("roles", [])
|
||||
|
||||
return CurrentUser(
|
||||
user_id=user_id,
|
||||
site_id=site_id,
|
||||
roles=roles,
|
||||
status="approved",
|
||||
limited=False,
|
||||
)
|
||||
|
||||
|
||||
async def get_current_user_or_limited(
    credentials: HTTPAuthorizationCredentials = Depends(_bearer_scheme),
) -> CurrentUser:
    """
    FastAPI dependency that also admits pending users holding limited tokens.

    - Limited token (limited=True): returns CurrentUser(limited=True,
      roles=[], status="pending") with no site context.
    - Full token: behaves like get_current_user and returns a complete
      CurrentUser.
    """

    def _unauthorized(detail: str) -> HTTPException:
        # Every failure path maps to 401 with a Bearer challenge header.
        return HTTPException(
            status_code=status.HTTP_401_UNAUTHORIZED,
            detail=detail,
            headers={"WWW-Authenticate": "Bearer"},
        )

    try:
        claims = decode_access_token(credentials.credentials)
    except JWTError:
        raise _unauthorized("无效的令牌")

    raw_sub = claims.get("sub")
    if raw_sub is None:
        raise _unauthorized("令牌缺少必要字段")

    try:
        uid = int(raw_sub)
    except (TypeError, ValueError):
        raise _unauthorized("令牌中 user_id 格式无效")

    # Limited token → pending user: no site binding, no roles.
    if claims.get("limited"):
        return CurrentUser(
            user_id=uid,
            site_id=0,
            roles=[],
            status="pending",
            limited=True,
        )

    # A full token must carry a site_id.
    bound_site = claims.get("site_id")
    if bound_site is None:
        raise _unauthorized("令牌缺少必要字段")

    return CurrentUser(
        user_id=uid,
        site_id=bound_site,
        roles=claims.get("roles", []),
        status="approved",
        limited=False,
    )
|
||||
|
||||
@@ -27,11 +27,14 @@ def hash_password(password: str) -> str:
|
||||
return bcrypt.hashpw(password.encode("utf-8"), bcrypt.gensalt()).decode("utf-8")
|
||||
|
||||
|
||||
def create_access_token(user_id: int, site_id: int) -> str:
|
||||
def create_access_token(
|
||||
user_id: int, site_id: int, roles: list[str] | None = None
|
||||
) -> str:
|
||||
"""
|
||||
生成 access_token。
|
||||
|
||||
payload: sub=user_id, site_id, type=access, exp
|
||||
payload: sub=user_id, site_id, roles, type=access, exp
|
||||
roles 参数默认 None,保持向后兼容。
|
||||
"""
|
||||
expire = datetime.now(timezone.utc) + timedelta(
|
||||
minutes=config.JWT_ACCESS_TOKEN_EXPIRE_MINUTES
|
||||
@@ -42,6 +45,8 @@ def create_access_token(user_id: int, site_id: int) -> str:
|
||||
"type": "access",
|
||||
"exp": expire,
|
||||
}
|
||||
if roles is not None:
|
||||
payload["roles"] = roles
|
||||
return jwt.encode(payload, config.JWT_SECRET_KEY, algorithm=config.JWT_ALGORITHM)
|
||||
|
||||
|
||||
@@ -63,15 +68,46 @@ def create_refresh_token(user_id: int, site_id: int) -> str:
|
||||
return jwt.encode(payload, config.JWT_SECRET_KEY, algorithm=config.JWT_ALGORITHM)
|
||||
|
||||
|
||||
def create_token_pair(user_id: int, site_id: int) -> dict[str, str]:
|
||||
def create_token_pair(user_id: int, site_id: int, roles: list[str] | None = None) -> dict[str, str]:
|
||||
"""生成 access_token + refresh_token 令牌对。"""
|
||||
return {
|
||||
"access_token": create_access_token(user_id, site_id),
|
||||
"access_token": create_access_token(user_id, site_id, roles=roles),
|
||||
"refresh_token": create_refresh_token(user_id, site_id),
|
||||
"token_type": "bearer",
|
||||
}
|
||||
|
||||
|
||||
def create_limited_token_pair(user_id: int) -> dict[str, str]:
    """
    Issue a limited token pair for a pending user.

    The payload carries no site_id and no roles — only user_id + type +
    limited=True. Limited tokens are only accepted by the application
    submission and status-query endpoints.
    """
    issued_at = datetime.now(timezone.utc)

    def _sign(token_type: str, lifetime: timedelta) -> str:
        # Both tokens share the same issue instant so their lifetimes align.
        claims = {
            "sub": str(user_id),
            "type": token_type,
            "limited": True,
            "exp": issued_at + lifetime,
        }
        return jwt.encode(claims, config.JWT_SECRET_KEY, algorithm=config.JWT_ALGORITHM)

    return {
        "access_token": _sign(
            "access", timedelta(minutes=config.JWT_ACCESS_TOKEN_EXPIRE_MINUTES)
        ),
        "refresh_token": _sign(
            "refresh", timedelta(days=config.JWT_REFRESH_TOKEN_EXPIRE_DAYS)
        ),
        "token_type": "bearer",
    }
|
||||
|
||||
|
||||
def decode_token(token: str) -> dict:
|
||||
"""
|
||||
解码并验证 JWT 令牌。
|
||||
|
||||
@@ -14,7 +14,8 @@ from app import config
|
||||
# CHANGE 2026-02-19 | 新增 xcx_test 路由(MVP 验证)+ wx_callback 路由(微信消息推送)
|
||||
# CHANGE 2026-02-22 | 新增 member_birthday 路由(助教手动补录会员生日)
|
||||
# CHANGE 2026-02-23 | 新增 ops_panel 路由(运维控制面板)
|
||||
from app.routers import auth, execution, schedules, tasks, env_config, db_viewer, etl_status, xcx_test, wx_callback, member_birthday, ops_panel
|
||||
# CHANGE 2026-02-25 | 新增 xcx_auth 路由(小程序微信登录 + 申请 + 状态查询 + 店铺切换)
|
||||
from app.routers import auth, execution, schedules, tasks, env_config, db_viewer, etl_status, xcx_test, wx_callback, member_birthday, ops_panel, xcx_auth
|
||||
from app.services.scheduler import scheduler
|
||||
from app.services.task_queue import task_queue
|
||||
from app.ws.logs import ws_router
|
||||
@@ -64,6 +65,7 @@ app.include_router(xcx_test.router)
|
||||
app.include_router(wx_callback.router)
|
||||
app.include_router(member_birthday.router)
|
||||
app.include_router(ops_panel.router)
|
||||
app.include_router(xcx_auth.router)
|
||||
|
||||
|
||||
@app.get("/health", tags=["系统"])
|
||||
|
||||
417
apps/backend/app/routers/xcx_auth.py
Normal file
417
apps/backend/app/routers/xcx_auth.py
Normal file
@@ -0,0 +1,417 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
"""
|
||||
小程序认证路由 —— 微信登录、申请提交、状态查询、店铺切换、令牌刷新。
|
||||
|
||||
端点清单:
|
||||
- POST /api/xcx/login — 微信登录(查找/创建用户 + 签发 JWT)
|
||||
- POST /api/xcx/apply — 提交入驻申请
|
||||
- GET /api/xcx/me — 查询自身状态 + 申请列表
|
||||
- GET /api/xcx/me/sites — 查询关联店铺
|
||||
- POST /api/xcx/switch-site — 切换当前店铺
|
||||
- POST /api/xcx/refresh — 刷新令牌
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
|
||||
from fastapi import APIRouter, Depends, HTTPException, status
|
||||
from jose import JWTError
|
||||
from psycopg2 import errors as pg_errors
|
||||
|
||||
from app.auth.dependencies import (
|
||||
CurrentUser,
|
||||
get_current_user,
|
||||
get_current_user_or_limited,
|
||||
)
|
||||
from app.auth.jwt import (
|
||||
create_limited_token_pair,
|
||||
create_token_pair,
|
||||
decode_refresh_token,
|
||||
)
|
||||
from app.database import get_connection
|
||||
from app.services.application import (
|
||||
create_application,
|
||||
get_user_applications,
|
||||
)
|
||||
from app.schemas.xcx_auth import (
|
||||
ApplicationRequest,
|
||||
ApplicationResponse,
|
||||
RefreshTokenRequest,
|
||||
SiteInfo,
|
||||
SwitchSiteRequest,
|
||||
UserStatusResponse,
|
||||
WxLoginRequest,
|
||||
WxLoginResponse,
|
||||
)
|
||||
from app.services.wechat import WeChatAuthError, code2session
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
router = APIRouter(prefix="/api/xcx", tags=["小程序认证"])
|
||||
|
||||
|
||||
# ── 辅助:查询用户在指定 site_id 下的角色 code 列表 ──────────
|
||||
|
||||
def _get_user_roles_at_site(conn, user_id: int, site_id: int) -> list[str]:
    """Return the role codes bound to the user at the given site_id."""
    query = """
            SELECT r.code
            FROM auth.user_site_roles usr
            JOIN auth.roles r ON usr.role_id = r.id
            WHERE usr.user_id = %s AND usr.site_id = %s
            """
    with conn.cursor() as cur:
        cur.execute(query, (user_id, site_id))
        # Each fetched row has a single column; unpack it to the code string.
        return [code for (code,) in cur.fetchall()]
|
||||
|
||||
|
||||
def _get_user_default_site(conn, user_id: int) -> int | None:
    """Return the user's first associated site_id, or None if unbound.

    NOTE(review): the query orders by site_id, not creation time — "first"
    here means the numerically smallest site_id, despite what callers may
    expect from the original wording.
    """
    with conn.cursor() as cur:
        cur.execute(
            """
            SELECT DISTINCT site_id
            FROM auth.user_site_roles
            WHERE user_id = %s
            ORDER BY site_id
            LIMIT 1
            """,
            (user_id,),
        )
        row = cur.fetchone()
        return row[0] if row else None
|
||||
|
||||
|
||||
# ── POST /api/xcx/login ──────────────────────────────────
|
||||
|
||||
@router.post("/login", response_model=WxLoginResponse)
|
||||
async def wx_login(body: WxLoginRequest):
|
||||
"""
|
||||
微信登录。
|
||||
|
||||
流程:code → code2session(openid) → 查找/创建 auth.users → 签发 JWT。
|
||||
- disabled 用户返回 403
|
||||
- 新用户自动创建(status=pending)
|
||||
- approved 用户签发包含 site_id + roles 的完整令牌
|
||||
- pending/rejected 用户签发受限令牌
|
||||
"""
|
||||
# 1. 调用微信 code2Session
|
||||
try:
|
||||
wx_result = await code2session(body.code)
|
||||
except WeChatAuthError as exc:
|
||||
raise HTTPException(status_code=exc.http_status, detail=exc.detail)
|
||||
except RuntimeError as exc:
|
||||
# 微信配置缺失
|
||||
logger.error("微信配置错误: %s", exc)
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
|
||||
detail="服务器配置错误",
|
||||
)
|
||||
|
||||
openid = wx_result["openid"]
|
||||
unionid = wx_result.get("unionid")
|
||||
|
||||
# 2. 查找/创建用户
|
||||
conn = get_connection()
|
||||
try:
|
||||
with conn.cursor() as cur:
|
||||
cur.execute(
|
||||
"SELECT id, status FROM auth.users WHERE wx_openid = %s",
|
||||
(openid,),
|
||||
)
|
||||
row = cur.fetchone()
|
||||
|
||||
if row is None:
|
||||
# 新用户:创建 pending 记录
|
||||
try:
|
||||
cur.execute(
|
||||
"""
|
||||
INSERT INTO auth.users (wx_openid, wx_union_id, status)
|
||||
VALUES (%s, %s, 'pending')
|
||||
RETURNING id, status
|
||||
""",
|
||||
(openid, unionid),
|
||||
)
|
||||
row = cur.fetchone()
|
||||
conn.commit()
|
||||
except pg_errors.UniqueViolation:
|
||||
# 并发创建:回滚后查询已有记录
|
||||
conn.rollback()
|
||||
cur.execute(
|
||||
"SELECT id, status FROM auth.users WHERE wx_openid = %s",
|
||||
(openid,),
|
||||
)
|
||||
row = cur.fetchone()
|
||||
|
||||
user_id, user_status = row
|
||||
|
||||
# 3. disabled 用户拒绝登录
|
||||
if user_status == "disabled":
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_403_FORBIDDEN,
|
||||
detail="账号已被禁用",
|
||||
)
|
||||
|
||||
# 4. 签发令牌
|
||||
if user_status == "approved":
|
||||
# 查找默认 site_id 和角色
|
||||
default_site_id = _get_user_default_site(conn, user_id)
|
||||
if default_site_id is not None:
|
||||
roles = _get_user_roles_at_site(conn, user_id, default_site_id)
|
||||
tokens = create_token_pair(user_id, default_site_id, roles=roles)
|
||||
else:
|
||||
# approved 但无 site 绑定(异常边界),签发受限令牌
|
||||
tokens = create_limited_token_pair(user_id)
|
||||
else:
|
||||
# pending / rejected → 受限令牌
|
||||
tokens = create_limited_token_pair(user_id)
|
||||
|
||||
finally:
|
||||
conn.close()
|
||||
|
||||
return WxLoginResponse(
|
||||
access_token=tokens["access_token"],
|
||||
refresh_token=tokens["refresh_token"],
|
||||
token_type=tokens["token_type"],
|
||||
user_status=user_status,
|
||||
user_id=user_id,
|
||||
)
|
||||
|
||||
|
||||
# ── POST /api/xcx/apply ──────────────────────────────────
|
||||
|
||||
@router.post("/apply", response_model=ApplicationResponse)
|
||||
async def submit_application(
|
||||
body: ApplicationRequest,
|
||||
user: CurrentUser = Depends(get_current_user_or_limited),
|
||||
):
|
||||
"""
|
||||
提交入驻申请。
|
||||
|
||||
委托 application service 处理:
|
||||
检查重复 pending → site_code 映射 → 创建记录 → 更新 nickname。
|
||||
"""
|
||||
result = await create_application(
|
||||
user_id=user.user_id,
|
||||
site_code=body.site_code,
|
||||
applied_role_text=body.applied_role_text,
|
||||
phone=body.phone,
|
||||
employee_number=body.employee_number,
|
||||
nickname=body.nickname,
|
||||
)
|
||||
return ApplicationResponse(**result)
|
||||
|
||||
|
||||
# ── GET /api/xcx/me ───────────────────────────────────────
|
||||
|
||||
@router.get("/me", response_model=UserStatusResponse)
|
||||
async def get_my_status(
|
||||
user: CurrentUser = Depends(get_current_user_or_limited),
|
||||
):
|
||||
"""
|
||||
查询自身状态 + 所有申请记录。
|
||||
|
||||
pending / approved / rejected 用户均可访问。
|
||||
"""
|
||||
conn = get_connection()
|
||||
try:
|
||||
with conn.cursor() as cur:
|
||||
# 查询用户基本信息
|
||||
cur.execute(
|
||||
"SELECT id, status, nickname FROM auth.users WHERE id = %s",
|
||||
(user.user_id,),
|
||||
)
|
||||
user_row = cur.fetchone()
|
||||
if user_row is None:
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_401_UNAUTHORIZED,
|
||||
detail="用户不存在",
|
||||
)
|
||||
|
||||
user_id, user_status, nickname = user_row
|
||||
finally:
|
||||
conn.close()
|
||||
|
||||
# 委托 service 查询申请列表
|
||||
app_list = await get_user_applications(user_id)
|
||||
applications = [ApplicationResponse(**a) for a in app_list]
|
||||
|
||||
return UserStatusResponse(
|
||||
user_id=user_id,
|
||||
status=user_status,
|
||||
nickname=nickname,
|
||||
applications=applications,
|
||||
)
|
||||
|
||||
|
||||
# ── GET /api/xcx/me/sites ────────────────────────────────
|
||||
|
||||
@router.get("/me/sites", response_model=list[SiteInfo])
|
||||
async def get_my_sites(
|
||||
user: CurrentUser = Depends(get_current_user),
|
||||
):
|
||||
"""
|
||||
查询当前用户关联的所有店铺及对应角色。
|
||||
|
||||
仅 approved 用户可访问(通过 get_current_user 依赖保证)。
|
||||
"""
|
||||
conn = get_connection()
|
||||
try:
|
||||
with conn.cursor() as cur:
|
||||
cur.execute(
|
||||
"""
|
||||
SELECT usr.site_id,
|
||||
COALESCE(scm.site_name, '未知店铺') AS site_name,
|
||||
r.code AS role_code,
|
||||
r.name AS role_name
|
||||
FROM auth.user_site_roles usr
|
||||
JOIN auth.roles r ON usr.role_id = r.id
|
||||
LEFT JOIN auth.site_code_mapping scm ON usr.site_id = scm.site_id
|
||||
WHERE usr.user_id = %s
|
||||
ORDER BY usr.site_id, r.code
|
||||
""",
|
||||
(user.user_id,),
|
||||
)
|
||||
rows = cur.fetchall()
|
||||
finally:
|
||||
conn.close()
|
||||
|
||||
# 按 site_id 分组
|
||||
sites_map: dict[int, SiteInfo] = {}
|
||||
for site_id, site_name, role_code, role_name in rows:
|
||||
if site_id not in sites_map:
|
||||
sites_map[site_id] = SiteInfo(
|
||||
site_id=site_id, site_name=site_name, roles=[]
|
||||
)
|
||||
sites_map[site_id].roles.append({"code": role_code, "name": role_name})
|
||||
|
||||
return list(sites_map.values())
|
||||
|
||||
|
||||
# ── POST /api/xcx/switch-site ────────────────────────────
|
||||
|
||||
@router.post("/switch-site", response_model=WxLoginResponse)
|
||||
async def switch_site(
|
||||
body: SwitchSiteRequest,
|
||||
user: CurrentUser = Depends(get_current_user),
|
||||
):
|
||||
"""
|
||||
切换当前店铺。
|
||||
|
||||
验证用户在目标 site_id 下有角色绑定,然后签发包含新 site_id 的 JWT。
|
||||
"""
|
||||
conn = get_connection()
|
||||
try:
|
||||
with conn.cursor() as cur:
|
||||
# 验证用户在目标 site_id 下有角色
|
||||
cur.execute(
|
||||
"""
|
||||
SELECT 1 FROM auth.user_site_roles
|
||||
WHERE user_id = %s AND site_id = %s
|
||||
LIMIT 1
|
||||
""",
|
||||
(user.user_id, body.site_id),
|
||||
)
|
||||
if cur.fetchone() is None:
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_403_FORBIDDEN,
|
||||
detail="您在该店铺下没有角色绑定",
|
||||
)
|
||||
|
||||
roles = _get_user_roles_at_site(conn, user.user_id, body.site_id)
|
||||
|
||||
# 查询用户状态
|
||||
cur.execute(
|
||||
"SELECT status FROM auth.users WHERE id = %s",
|
||||
(user.user_id,),
|
||||
)
|
||||
user_row = cur.fetchone()
|
||||
user_status = user_row[0] if user_row else "pending"
|
||||
|
||||
finally:
|
||||
conn.close()
|
||||
|
||||
tokens = create_token_pair(user.user_id, body.site_id, roles=roles)
|
||||
|
||||
return WxLoginResponse(
|
||||
access_token=tokens["access_token"],
|
||||
refresh_token=tokens["refresh_token"],
|
||||
token_type=tokens["token_type"],
|
||||
user_status=user_status,
|
||||
user_id=user.user_id,
|
||||
)
|
||||
|
||||
|
||||
# ── POST /api/xcx/refresh ────────────────────────────────
|
||||
|
||||
@router.post("/refresh", response_model=WxLoginResponse)
|
||||
async def refresh_token(body: RefreshTokenRequest):
|
||||
"""
|
||||
刷新令牌。
|
||||
|
||||
解码 refresh_token → 根据用户当前状态签发新的令牌对。
|
||||
- 受限 refresh_token(limited=True)→ 签发新的受限令牌对
|
||||
- 完整 refresh_token → 签发新的完整令牌对(保持原 site_id)
|
||||
"""
|
||||
try:
|
||||
payload = decode_refresh_token(body.refresh_token)
|
||||
except JWTError:
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_401_UNAUTHORIZED,
|
||||
detail="无效的刷新令牌",
|
||||
)
|
||||
|
||||
user_id = int(payload["sub"])
|
||||
is_limited = payload.get("limited", False)
|
||||
|
||||
conn = get_connection()
|
||||
try:
|
||||
with conn.cursor() as cur:
|
||||
# 查询用户当前状态
|
||||
cur.execute(
|
||||
"SELECT id, status FROM auth.users WHERE id = %s",
|
||||
(user_id,),
|
||||
)
|
||||
user_row = cur.fetchone()
|
||||
if user_row is None:
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_401_UNAUTHORIZED,
|
||||
detail="用户不存在",
|
||||
)
|
||||
|
||||
_, user_status = user_row
|
||||
|
||||
if user_status == "disabled":
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_403_FORBIDDEN,
|
||||
detail="账号已被禁用",
|
||||
)
|
||||
|
||||
if is_limited or user_status != "approved":
|
||||
# 受限令牌刷新 → 仍签发受限令牌
|
||||
tokens = create_limited_token_pair(user_id)
|
||||
else:
|
||||
# 完整令牌刷新 → 使用原 site_id 签发
|
||||
site_id = payload.get("site_id")
|
||||
if site_id is None:
|
||||
# 回退到默认 site
|
||||
site_id = _get_user_default_site(conn, user_id)
|
||||
if site_id is not None:
|
||||
roles = _get_user_roles_at_site(conn, user_id, site_id)
|
||||
tokens = create_token_pair(user_id, site_id, roles=roles)
|
||||
else:
|
||||
tokens = create_limited_token_pair(user_id)
|
||||
|
||||
finally:
|
||||
conn.close()
|
||||
|
||||
return WxLoginResponse(
|
||||
access_token=tokens["access_token"],
|
||||
refresh_token=tokens["refresh_token"],
|
||||
token_type=tokens["token_type"],
|
||||
user_status=user_status,
|
||||
user_id=user_id,
|
||||
)
|
||||
104
apps/backend/app/schemas/xcx_auth.py
Normal file
104
apps/backend/app/schemas/xcx_auth.py
Normal file
@@ -0,0 +1,104 @@
|
||||
"""
|
||||
小程序认证相关 Pydantic 模型。
|
||||
|
||||
覆盖:微信登录、用户申请、审核、人员匹配、店铺切换等场景。
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from pydantic import BaseModel, Field
|
||||
|
||||
|
||||
# ── 微信登录 ──────────────────────────────────────────────
|
||||
|
||||
class WxLoginRequest(BaseModel):
    """WeChat login request body."""
    code: str = Field(..., min_length=1, description="微信临时登录凭证")
|
||||
|
||||
|
||||
class WxLoginResponse(BaseModel):
    """WeChat login response: JWT pair plus the user's current standing."""
    access_token: str
    refresh_token: str
    token_type: str = "bearer"
    user_status: str  # pending / approved / rejected / disabled
    user_id: int
|
||||
|
||||
|
||||
# ── 用户申请 ──────────────────────────────────────────────
|
||||
|
||||
class ApplicationRequest(BaseModel):
    """Onboarding application submission request."""
    # site_code pattern: two letters followed by three digits, e.g. "AB123".
    site_code: str = Field(..., pattern=r"^[A-Za-z]{2}\d{3}$", description="球房ID")
    applied_role_text: str = Field(..., min_length=1, max_length=100, description="申请身份")
    # 11-digit phone number (no country code).
    phone: str = Field(..., pattern=r"^\d{11}$", description="手机号")
    employee_number: str | None = Field(None, max_length=50, description="员工编号")
    nickname: str | None = Field(None, max_length=50, description="昵称")
|
||||
|
||||
|
||||
class ApplicationResponse(BaseModel):
    """A single application record."""
    id: int
    site_code: str
    applied_role_text: str
    status: str
    review_note: str | None = None
    # Timestamps arrive as text (the service casts created_at::text in SQL).
    created_at: str
    reviewed_at: str | None = None
|
||||
|
||||
|
||||
# ── 用户状态 ──────────────────────────────────────────────
|
||||
|
||||
class UserStatusResponse(BaseModel):
    """User status query response."""
    user_id: int
    status: str
    nickname: str | None = None
    # Pydantic copies mutable defaults per instance, so [] is safe here.
    applications: list[ApplicationResponse] = []
|
||||
|
||||
|
||||
# ── 店铺 ──────────────────────────────────────────────────
|
||||
|
||||
class SiteInfo(BaseModel):
    """Site info, including the roles the user holds there."""
    site_id: int
    site_name: str
    # Each entry has the shape {"code": ..., "name": ...} (built by the
    # /me/sites router).
    roles: list[dict] = []
|
||||
|
||||
|
||||
class SwitchSiteRequest(BaseModel):
    """Switch-site request."""
    site_id: int
|
||||
|
||||
|
||||
# ── 刷新令牌 ──────────────────────────────────────────────
|
||||
|
||||
class RefreshTokenRequest(BaseModel):
    """Token refresh request."""
    refresh_token: str = Field(..., min_length=1, description="刷新令牌")
|
||||
|
||||
|
||||
# ── 人员匹配 ──────────────────────────────────────────────
|
||||
|
||||
class MatchCandidate(BaseModel):
    """A candidate person record for matching an applicant."""
    source_type: str  # assistant / staff
    id: int
    name: str
    mobile: str | None = None
    job_num: str | None = None
||||
|
||||
|
||||
# ── 管理端审核 ────────────────────────────────────────────
|
||||
|
||||
class ApproveRequest(BaseModel):
    """Approve-application request (admin side)."""
    role_id: int
    binding: dict | None = None  # {"assistant_id": ..., "staff_id": ..., "binding_type": ...}
    review_note: str | None = None
|
||||
|
||||
|
||||
class RejectRequest(BaseModel):
    """Reject-application request (admin side)."""
    review_note: str = Field(..., min_length=1, description="拒绝原因")
|
||||
|
||||
347
apps/backend/app/services/application.py
Normal file
347
apps/backend/app/services/application.py
Normal file
@@ -0,0 +1,347 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
"""
|
||||
申请服务 —— 处理用户申请的创建、查询、审核。
|
||||
|
||||
职责:
|
||||
- create_application():创建申请 + site_code 映射查找
|
||||
- approve_application():批准 + 创建绑定/角色
|
||||
- reject_application():拒绝 + 记录原因
|
||||
- get_user_applications():查询用户申请列表
|
||||
|
||||
所有数据库操作使用 psycopg2 原生 SQL,不引入 ORM。
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
|
||||
from fastapi import HTTPException, status
|
||||
|
||||
from app.database import get_connection
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
async def create_application(
    user_id: int,
    site_code: str,
    applied_role_text: str,
    phone: str,
    employee_number: str | None = None,
    nickname: str | None = None,
) -> dict:
    """
    Create a user application.

    1. Reject with 409 if the user already has a pending application.
    2. Resolve the site_code → site_id mapping (site_id stays None when
       no mapping exists).
    3. Insert the user_applications record (status='pending').
    4. Update users.nickname when one is provided.

    Returns:
        The application record as a dict with id / site_code /
        applied_role_text / status / review_note / created_at / reviewed_at.
    """
    conn = get_connection()
    try:
        with conn.cursor() as cur:
            # 1. Check for an existing pending application.
            cur.execute(
                """
                SELECT id FROM auth.user_applications
                WHERE user_id = %s AND status = 'pending'
                LIMIT 1
                """,
                (user_id,),
            )
            if cur.fetchone() is not None:
                raise HTTPException(
                    status_code=status.HTTP_409_CONFLICT,
                    detail="已有待审核的申请,请等待审核完成",
                )

            # 2. Resolve site_code → site_id (left as None when unmapped).
            site_id = None
            cur.execute(
                "SELECT site_id FROM auth.site_code_mapping WHERE site_code = %s",
                (site_code,),
            )
            mapping_row = cur.fetchone()
            if mapping_row is not None:
                site_id = mapping_row[0]

            # 3. Insert the application; timestamps come back cast to text.
            cur.execute(
                """
                INSERT INTO auth.user_applications
                    (user_id, site_code, site_id, applied_role_text,
                     phone, employee_number, status)
                VALUES (%s, %s, %s, %s, %s, %s, 'pending')
                RETURNING id, site_code, applied_role_text, status,
                          review_note, created_at::text, reviewed_at::text
                """,
                (
                    user_id,
                    site_code,
                    site_id,
                    applied_role_text,
                    phone,
                    employee_number,
                ),
            )
            row = cur.fetchone()

            # 4. Update the nickname when provided (same transaction).
            if nickname:
                cur.execute(
                    "UPDATE auth.users SET nickname = %s, updated_at = NOW() WHERE id = %s",
                    (nickname, user_id),
                )

            conn.commit()
    finally:
        conn.close()

    return {
        "id": row[0],
        "site_code": row[1],
        "applied_role_text": row[2],
        "status": row[3],
        "review_note": row[4],
        "created_at": row[5],
        "reviewed_at": row[6],
    }
|
||||
|
||||
|
||||
|
||||
async def approve_application(
    application_id: int,
    reviewer_id: int,
    role_id: int,
    binding: dict | None = None,
    review_note: str | None = None,
) -> dict:
    """
    Approve a pending application and activate the applicant's account.

    Flow (order matters — all statements run in one transaction):
      1. Load the application row (404 if it does not exist).
      2. Require status == 'pending' (409 otherwise).
      3. Mark the application 'approved' and stamp reviewer_id /
         review_note / reviewed_at.
      4. Insert a user_site_roles binding for the assigned role
         (only when the application carries a site_id; duplicate
         bindings are ignored via ON CONFLICT DO NOTHING).
      5. Insert a user_assistant_binding row when a binding dict was
         supplied AND the application has a site_id; binding_type
         defaults to 'assistant'.
      6. Flip auth.users.status to 'approved'.

    Args:
        application_id: ID of the application being approved.
        reviewer_id: user_id of the reviewing admin.
        role_id: role to grant at the application's site.
        binding: optional dict {"assistant_id": ..., "staff_id": ...,
            "binding_type": ...} linking the user to ETL personnel.
        review_note: optional free-text review note.

    Returns:
        The updated application record as a dict (id / site_code /
        applied_role_text / status / review_note / created_at /
        reviewed_at).

    Raises:
        HTTPException: 404 when the application is missing,
            409 when it is not in 'pending' state.
    """
    conn = get_connection()
    try:
        with conn.cursor() as cur:
            # 1. Load the application row.
            cur.execute(
                """
                SELECT id, user_id, site_id, status
                FROM auth.user_applications
                WHERE id = %s
                """,
                (application_id,),
            )
            app_row = cur.fetchone()
            if app_row is None:
                raise HTTPException(
                    status_code=status.HTTP_404_NOT_FOUND,
                    detail="申请不存在",
                )

            _, app_user_id, app_site_id, app_status = app_row

            # 2. Only pending applications may be reviewed.
            # NOTE(review): if this raises mid-transaction, nothing has
            # been committed; relies on conn.close() in the finally
            # discarding the open transaction — confirm driver behavior.
            if app_status != "pending":
                raise HTTPException(
                    status_code=status.HTTP_409_CONFLICT,
                    detail=f"申请当前状态为 {app_status},无法审核",
                )

            # 3. Mark approved and record the review metadata;
            #    RETURNING gives us the payload for the response dict.
            cur.execute(
                """
                UPDATE auth.user_applications
                SET status = 'approved',
                    reviewer_id = %s,
                    review_note = %s,
                    reviewed_at = NOW()
                WHERE id = %s
                RETURNING id, site_code, applied_role_text, status,
                          review_note, created_at::text, reviewed_at::text
                """,
                (reviewer_id, review_note, application_id),
            )
            updated_row = cur.fetchone()

            # 4. Grant the assigned role at the site (idempotent insert).
            if app_site_id is not None:
                cur.execute(
                    """
                    INSERT INTO auth.user_site_roles (user_id, site_id, role_id)
                    VALUES (%s, %s, %s)
                    ON CONFLICT (user_id, site_id, role_id) DO NOTHING
                    """,
                    (app_user_id, app_site_id, role_id),
                )

            # 5. Optional personnel binding — silently skipped when the
            #    application has no site_id, even if binding was given.
            if binding and app_site_id is not None:
                cur.execute(
                    """
                    INSERT INTO auth.user_assistant_binding
                    (user_id, site_id, assistant_id, staff_id, binding_type)
                    VALUES (%s, %s, %s, %s, %s)
                    """,
                    (
                        app_user_id,
                        app_site_id,
                        binding.get("assistant_id"),
                        binding.get("staff_id"),
                        binding.get("binding_type", "assistant"),
                    ),
                )

            # 6. Activate the user account itself.
            cur.execute(
                """
                UPDATE auth.users
                SET status = 'approved', updated_at = NOW()
                WHERE id = %s
                """,
                (app_user_id,),
            )

        conn.commit()
    finally:
        conn.close()

    return {
        "id": updated_row[0],
        "site_code": updated_row[1],
        "applied_role_text": updated_row[2],
        "status": updated_row[3],
        "review_note": updated_row[4],
        "created_at": updated_row[5],
        "reviewed_at": updated_row[6],
    }
|
||||
|
||||
|
||||
async def reject_application(
    application_id: int,
    reviewer_id: int,
    review_note: str,
) -> dict:
    """
    Reject a pending application.

    Steps:
      1. Load the application (404 if it does not exist).
      2. Require status == 'pending' (409 otherwise).
      3. Mark it 'rejected' and stamp reviewer_id / review_note /
         reviewed_at.

    Args:
        application_id: ID of the application to reject.
        reviewer_id: user_id of the reviewing admin.
        review_note: mandatory note explaining the rejection.

    Returns:
        The updated application record as a dict.

    Raises:
        HTTPException: 404 when missing, 409 when not pending.
    """
    connection = get_connection()
    try:
        with connection.cursor() as cursor:
            # Step 1: locate the application row.
            cursor.execute(
                "SELECT id, status FROM auth.user_applications WHERE id = %s",
                (application_id,),
            )
            record = cursor.fetchone()
            if record is None:
                raise HTTPException(
                    status_code=status.HTTP_404_NOT_FOUND,
                    detail="申请不存在",
                )

            # Step 2: only pending applications may be reviewed.
            current_status = record[1]
            if current_status != "pending":
                raise HTTPException(
                    status_code=status.HTTP_409_CONFLICT,
                    detail=f"申请当前状态为 {current_status},无法审核",
                )

            # Step 3: flip to rejected and record the review metadata.
            cursor.execute(
                """
                UPDATE auth.user_applications
                SET status = 'rejected',
                    reviewer_id = %s,
                    review_note = %s,
                    reviewed_at = NOW()
                WHERE id = %s
                RETURNING id, site_code, applied_role_text, status,
                          review_note, created_at::text, reviewed_at::text
                """,
                (reviewer_id, review_note, application_id),
            )
            updated = cursor.fetchone()
        connection.commit()
    finally:
        connection.close()

    field_names = (
        "id",
        "site_code",
        "applied_role_text",
        "status",
        "review_note",
        "created_at",
        "reviewed_at",
    )
    return dict(zip(field_names, updated))
|
||||
|
||||
|
||||
async def get_user_applications(user_id: int) -> list[dict]:
    """
    List every application submitted by a user, newest first.

    Args:
        user_id: ID of the user whose applications are requested.

    Returns:
        A list of application dicts ordered by created_at descending.
    """
    connection = get_connection()
    try:
        with connection.cursor() as cursor:
            cursor.execute(
                """
                SELECT id, site_code, applied_role_text, status,
                       review_note, created_at::text, reviewed_at::text
                FROM auth.user_applications
                WHERE user_id = %s
                ORDER BY created_at DESC
                """,
                (user_id,),
            )
            records = cursor.fetchall()
    finally:
        connection.close()

    field_names = (
        "id",
        "site_code",
        "applied_role_text",
        "status",
        "review_note",
        "created_at",
        "reviewed_at",
    )
    return [dict(zip(field_names, record)) for record in records]
|
||||
170
apps/backend/app/services/matching.py
Normal file
170
apps/backend/app/services/matching.py
Normal file
@@ -0,0 +1,170 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
"""
|
||||
人员匹配服务 —— 根据申请信息在 FDW 外部表中查找候选匹配。
|
||||
|
||||
职责:
|
||||
- find_candidates():根据 site_id + phone(+ employee_number)在助教表和员工表中查找匹配
|
||||
|
||||
查询通过业务库的 fdw_etl Schema 访问 ETL 库的 RLS 视图。
|
||||
查询前需 SET LOCAL app.current_site_id 以启用门店隔离。
|
||||
FDW 外部表可能不存在(测试库等场景),需优雅降级返回空列表。
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
|
||||
from app.database import get_connection
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
async def find_candidates(
    site_id: int | None,
    phone: str,
    employee_number: str | None = None,
) -> list[dict]:
    """
    Search the assistant and staff FDW views for candidate matches.

    Query flow:
      1. A None site_id skips matching entirely (requirement 5.6).
      2. app.current_site_id is set with SET LOCAL so the remote ETL
         database applies RLS site isolation through the FDW.
      3. Assistants are matched on mobile; staff on mobile or job_num.

    Any failure (e.g. FDW tables absent in a test database) is logged
    and degrades gracefully to an empty list.

    Args:
        site_id: site ID, or None to skip matching.
        phone: phone number used for mobile matching.
        employee_number: optional employee number for job_num matching.

    Returns:
        [{"source_type": "assistant"|"staff", "id": int, "name": str,
          "mobile": str|None, "job_num": str|None}]
    """
    if site_id is None:
        return []

    results: list[dict] = []
    conn = get_connection()
    try:
        conn.autocommit = False
        with conn.cursor() as cur:
            # Transaction-scoped session variable; the FDW forwards it
            # to the remote ETL database for RLS isolation.
            cur.execute(
                "SET LOCAL app.current_site_id = %s", (str(site_id),)
            )
            results += _query_assistants(cur, phone)
            results += _query_staff(cur, phone, employee_number)
        conn.commit()
    except Exception:
        logger.warning(
            "FDW 人员匹配查询失败 (site_id=%s, phone=%s),返回空列表",
            site_id,
            phone,
            exc_info=True,
        )
        try:
            conn.rollback()
        except Exception:
            pass
        return []
    finally:
        conn.close()

    return results
|
||||
|
||||
|
||||
def _query_assistants(cur, phone: str) -> list[dict]:
|
||||
"""查询 fdw_etl.v_dim_assistant 中按 mobile 匹配的助教记录。"""
|
||||
try:
|
||||
cur.execute(
|
||||
"""
|
||||
SELECT assistant_id, real_name, mobile
|
||||
FROM fdw_etl.v_dim_assistant
|
||||
WHERE mobile = %s
|
||||
AND scd2_is_current = TRUE
|
||||
""",
|
||||
(phone,),
|
||||
)
|
||||
rows = cur.fetchall()
|
||||
except Exception:
|
||||
logger.warning(
|
||||
"查询 fdw_etl.v_dim_assistant 失败,跳过助教匹配",
|
||||
exc_info=True,
|
||||
)
|
||||
return []
|
||||
|
||||
return [
|
||||
{
|
||||
"source_type": "assistant",
|
||||
"id": row[0],
|
||||
"name": row[1] or "",
|
||||
"mobile": row[2],
|
||||
"job_num": None,
|
||||
}
|
||||
for row in rows
|
||||
]
|
||||
|
||||
|
||||
def _query_staff(
|
||||
cur, phone: str, employee_number: str | None
|
||||
) -> list[dict]:
|
||||
"""
|
||||
查询 fdw_etl.v_dim_staff JOIN fdw_etl.v_dim_staff_ex
|
||||
按 mobile 或 job_num 匹配的员工记录。
|
||||
"""
|
||||
try:
|
||||
# 构建 WHERE 条件:mobile = phone,或 job_num = employee_number
|
||||
if employee_number:
|
||||
cur.execute(
|
||||
"""
|
||||
SELECT s.staff_id, s.staff_name, s.mobile, ex.job_num
|
||||
FROM fdw_etl.v_dim_staff s
|
||||
LEFT JOIN fdw_etl.v_dim_staff_ex ex
|
||||
ON s.staff_id = ex.staff_id
|
||||
AND ex.scd2_is_current = TRUE
|
||||
WHERE s.scd2_is_current = TRUE
|
||||
AND (s.mobile = %s OR ex.job_num = %s)
|
||||
""",
|
||||
(phone, employee_number),
|
||||
)
|
||||
else:
|
||||
cur.execute(
|
||||
"""
|
||||
SELECT s.staff_id, s.staff_name, s.mobile, ex.job_num
|
||||
FROM fdw_etl.v_dim_staff s
|
||||
LEFT JOIN fdw_etl.v_dim_staff_ex ex
|
||||
ON s.staff_id = ex.staff_id
|
||||
AND ex.scd2_is_current = TRUE
|
||||
WHERE s.scd2_is_current = TRUE
|
||||
AND s.mobile = %s
|
||||
""",
|
||||
(phone,),
|
||||
)
|
||||
rows = cur.fetchall()
|
||||
except Exception:
|
||||
logger.warning(
|
||||
"查询 fdw_etl.v_dim_staff 失败,跳过员工匹配",
|
||||
exc_info=True,
|
||||
)
|
||||
return []
|
||||
|
||||
return [
|
||||
{
|
||||
"source_type": "staff",
|
||||
"id": row[0],
|
||||
"name": row[1] or "",
|
||||
"mobile": row[2],
|
||||
"job_num": row[3],
|
||||
}
|
||||
for row in rows
|
||||
]
|
||||
129
apps/backend/app/services/role.py
Normal file
129
apps/backend/app/services/role.py
Normal file
@@ -0,0 +1,129 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
"""
|
||||
角色权限服务 —— 查询用户在指定店铺下的角色和权限。
|
||||
|
||||
职责:
|
||||
- get_user_permissions():获取用户在指定 site_id 下的权限 code 列表
|
||||
- get_user_sites():获取用户关联的所有店铺及对应角色
|
||||
- check_user_has_site_role():检查用户在指定 site_id 下是否有任何角色绑定
|
||||
|
||||
所有数据库操作使用 psycopg2 原生 SQL,不引入 ORM。
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
|
||||
from app.database import get_connection
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
async def get_user_permissions(user_id: int, site_id: int) -> list[str]:
    """
    Fetch the permission codes a user holds at a given site.

    Joins user_site_roles -> role_permissions -> permissions and
    returns the de-duplicated permission codes.

    Args:
        user_id: user ID.
        site_id: site ID.

    Returns:
        Permission code strings, e.g. ["view_tasks", "view_board"].
    """
    connection = get_connection()
    try:
        with connection.cursor() as cursor:
            cursor.execute(
                """
                SELECT DISTINCT p.code
                FROM auth.user_site_roles usr
                JOIN auth.role_permissions rp ON usr.role_id = rp.role_id
                JOIN auth.permissions p ON rp.permission_id = p.id
                WHERE usr.user_id = %s AND usr.site_id = %s
                """,
                (user_id, site_id),
            )
            permission_rows = cursor.fetchall()
    finally:
        connection.close()

    return [code for (code,) in permission_rows]
|
||||
|
||||
|
||||
async def get_user_sites(user_id: int) -> list[dict]:
    """
    Fetch every site a user is associated with, plus the roles held there.

    Queries user_site_roles JOIN roles, LEFT JOIN site_code_mapping for
    the site name, and groups the flat (site, role) rows by site_id.

    Args:
        user_id: user ID.

    Returns:
        [{"site_id": int, "site_name": str,
          "roles": [{"code": str, "name": str}]}]
    """
    connection = get_connection()
    try:
        with connection.cursor() as cursor:
            cursor.execute(
                """
                SELECT usr.site_id,
                       COALESCE(scm.site_name, '') AS site_name,
                       r.code,
                       r.name
                FROM auth.user_site_roles usr
                JOIN auth.roles r ON usr.role_id = r.id
                LEFT JOIN auth.site_code_mapping scm ON usr.site_id = scm.site_id
                WHERE usr.user_id = %s
                ORDER BY usr.site_id, r.code
                """,
                (user_id,),
            )
            flat_rows = cursor.fetchall()
    finally:
        connection.close()

    # Group by site_id; ORDER BY keeps sites and roles in stable order.
    grouped: dict[int, dict] = {}
    for site_id, site_name, role_code, role_name in flat_rows:
        entry = grouped.setdefault(
            site_id,
            {"site_id": site_id, "site_name": site_name, "roles": []},
        )
        entry["roles"].append({"code": role_code, "name": role_name})

    return list(grouped.values())
|
||||
|
||||
|
||||
async def check_user_has_site_role(user_id: int, site_id: int) -> bool:
    """
    Check whether the user has any role binding at the given site.

    Args:
        user_id: user ID.
        site_id: site ID.

    Returns:
        True when at least one role binding exists, False otherwise.
    """
    connection = get_connection()
    try:
        with connection.cursor() as cursor:
            cursor.execute(
                """
                SELECT 1
                FROM auth.user_site_roles
                WHERE user_id = %s AND site_id = %s
                LIMIT 1
                """,
                (user_id, site_id),
            )
            found = cursor.fetchone()
    finally:
        connection.close()

    return found is not None
|
||||
@@ -44,7 +44,7 @@ class DwdTableDefinition:
|
||||
ODS_TASKS: list[TaskDefinition] = [
|
||||
TaskDefinition("ODS_ASSISTANT_ACCOUNT", "助教账号", "抽取助教账号主数据", "助教", "ODS", is_ods=True),
|
||||
TaskDefinition("ODS_ASSISTANT_LEDGER", "助教服务记录", "抽取助教服务流水", "助教", "ODS", is_ods=True),
|
||||
TaskDefinition("ODS_ASSISTANT_ABOLISH", "助教取消记录", "抽取助教取消/作废记录", "助教", "ODS", is_ods=True),
|
||||
|
||||
TaskDefinition("ODS_SETTLEMENT_RECORDS", "结算记录", "抽取订单结算记录", "结算", "ODS", is_ods=True),
|
||||
# CHANGE [2026-07-20] intent: 同步 ETL 侧移除——ODS_SETTLEMENT_TICKET 已在 Task 7.3 中彻底移除
|
||||
TaskDefinition("ODS_TABLE_USE", "台费流水", "抽取台费使用流水", "台桌", "ODS", is_ods=True),
|
||||
@@ -65,6 +65,7 @@ ODS_TASKS: list[TaskDefinition] = [
|
||||
TaskDefinition("ODS_STORE_GOODS", "门店商品", "抽取门店商品主数据", "商品", "ODS", is_ods=True, requires_window=False),
|
||||
TaskDefinition("ODS_STORE_GOODS_SALES", "商品销售", "抽取门店商品销售记录", "商品", "ODS", is_ods=True),
|
||||
TaskDefinition("ODS_TENANT_GOODS", "租户商品", "抽取租户级商品主数据", "商品", "ODS", is_ods=True, requires_window=False),
|
||||
TaskDefinition("ODS_STAFF_INFO", "员工档案", "抽取员工档案(含在职/离职)", "助教", "ODS", is_ods=True, requires_window=False),
|
||||
]
|
||||
|
||||
# ── DWD 任务定义 ──────────────────────────────────────────────
|
||||
@@ -105,18 +106,17 @@ INDEX_TASKS: list[TaskDefinition] = [
|
||||
TaskDefinition("DWS_ML_MANUAL_IMPORT", "手动导入 (ML)", "手动导入机器学习数据", "指数", "INDEX", requires_window=False, is_common=False),
|
||||
# CHANGE [2026-02-19] intent: 补充说明 RelationIndexTask 产出 RS/OS/MS/ML 四个子指数
|
||||
TaskDefinition("DWS_RELATION_INDEX", "关系指数 (RS)", "产出 RS/OS/MS/ML 四个子指数", "指数", "INDEX"),
|
||||
TaskDefinition("DWS_SPENDING_POWER_INDEX", "消费力指数 (SPI)", "计算会员消费力指数", "指数", "INDEX"),
|
||||
]
|
||||
|
||||
# ── 工具类任务定义 ────────────────────────────────────────────
|
||||
|
||||
UTILITY_TASKS: list[TaskDefinition] = [
|
||||
TaskDefinition("MANUAL_INGEST", "手动导入", "从本地 JSON 文件手动导入数据", "工具", "UTILITY", requires_window=False, is_common=False),
|
||||
TaskDefinition("INIT_ODS_SCHEMA", "初始化 ODS Schema", "创建 ODS 层表结构", "工具", "UTILITY", requires_window=False, is_common=False),
|
||||
TaskDefinition("INIT_DWD_SCHEMA", "初始化 DWD Schema", "创建 DWD 层表结构", "工具", "UTILITY", requires_window=False, is_common=False),
|
||||
TaskDefinition("INIT_DWS_SCHEMA", "初始化 DWS Schema", "创建 DWS 层表结构", "工具", "UTILITY", requires_window=False, is_common=False),
|
||||
# CHANGE [2026-02-24] intent: 移除 4 个一次性初始化任务(INIT_ODS/DWD/DWS_SCHEMA、SEED_DWS_CONFIG),
|
||||
# 环境已搭建完成,仅保留 ETL 侧实现供运维脚本直接 import 使用,UI 不再展示
|
||||
TaskDefinition("ODS_JSON_ARCHIVE", "ODS JSON 归档", "归档 ODS 原始 JSON 文件", "工具", "UTILITY", requires_window=False, is_common=False),
|
||||
TaskDefinition("CHECK_CUTOFF", "游标检查", "检查各任务数据游标截止点", "工具", "UTILITY", requires_window=False, is_common=False),
|
||||
TaskDefinition("SEED_DWS_CONFIG", "DWS 配置种子", "初始化 DWS 配置数据", "工具", "UTILITY", requires_window=False, is_common=False),
|
||||
TaskDefinition("DATA_INTEGRITY_CHECK", "数据完整性校验", "校验跨层数据完整性", "工具", "UTILITY", requires_window=False, is_common=False),
|
||||
]
|
||||
|
||||
@@ -202,8 +202,7 @@ DWD_TABLES: list[DwdTableDefinition] = [
|
||||
DwdTableDefinition("dwd.dwd_store_goods_sale_ex", "商品销售(扩展)", "商品", "ods.store_goods_sales_records"),
|
||||
DwdTableDefinition("dwd.dwd_assistant_service_log", "助教服务流水", "助教", "ods.assistant_service_records"),
|
||||
DwdTableDefinition("dwd.dwd_assistant_service_log_ex", "助教服务流水(扩展)", "助教", "ods.assistant_service_records"),
|
||||
DwdTableDefinition("dwd.dwd_assistant_trash_event", "助教取消事件", "助教", "ods.assistant_cancellation_records"),
|
||||
DwdTableDefinition("dwd.dwd_assistant_trash_event_ex", "助教取消事件(扩展)", "助教", "ods.assistant_cancellation_records"),
|
||||
# CHANGE [2026-02-24] intent: 移除已废弃的 assistant_trash_event 表定义(ODS_ASSISTANT_ABOLISH 全链路已清理)
|
||||
DwdTableDefinition("dwd.dwd_member_balance_change", "会员余额变动", "会员", "ods.member_balance_changes"),
|
||||
DwdTableDefinition("dwd.dwd_member_balance_change_ex", "会员余额变动(扩展)", "会员", "ods.member_balance_changes"),
|
||||
DwdTableDefinition("dwd.dwd_groupbuy_redemption", "团购核销", "团购", "ods.group_buy_redemption_records"),
|
||||
|
||||
90
apps/backend/app/services/wechat.py
Normal file
90
apps/backend/app/services/wechat.py
Normal file
@@ -0,0 +1,90 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
"""
|
||||
微信认证服务 —— 封装 code2Session API 调用。
|
||||
|
||||
通过 httpx.AsyncClient 异步调用微信 jscode2session 接口,
|
||||
将小程序端的临时登录凭证 (code) 换取 openid / session_key。
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
|
||||
import httpx
|
||||
|
||||
from app.config import get
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
CODE2SESSION_URL = "https://api.weixin.qq.com/sns/jscode2session"
|
||||
|
||||
# Maps a WeChat errcode to (suggested HTTP status, user-facing message).
_WX_ERROR_MAP: dict[int, tuple[int, str]] = {
    40029: (401, "登录凭证无效,请重新登录"),
    45011: (429, "请求过于频繁"),
    40226: (403, "账号存在风险"),
}


class WeChatAuthError(Exception):
    """WeChat authentication failure carrying the raw errcode/errmsg."""

    def __init__(self, errcode: int, errmsg: str) -> None:
        self.errcode = errcode
        self.errmsg = errmsg
        super().__init__(f"WeChatAuthError({errcode}): {errmsg}")

    @property
    def http_status(self) -> int:
        """Suggested HTTP status code for this errcode (defaults to 401)."""
        status_code, _ = _WX_ERROR_MAP.get(self.errcode, (401, ""))
        return status_code

    @property
    def detail(self) -> str:
        """User-facing message for this errcode (generic fallback otherwise)."""
        _, message = _WX_ERROR_MAP.get(self.errcode, (401, "微信登录失败"))
        return message
|
||||
|
||||
|
||||
async def code2session(code: str) -> dict:
    """
    Exchange a mini-program login code via WeChat's code2Session API.

    Args:
        code: temporary credential obtained from wx.login() on the client.

    Returns:
        {"openid": str, "session_key": str, "unionid": str | None}

    Raises:
        WeChatAuthError: when WeChat returns a non-zero errcode.
        RuntimeError: when WX_APPID / WX_SECRET are not configured.
    """
    app_id = get("WX_APPID", "")
    app_secret = get("WX_SECRET", "")
    if not app_id or not app_secret:
        raise RuntimeError("微信配置缺失:WX_APPID 或 WX_SECRET 未设置")

    query = {
        "appid": app_id,
        "secret": app_secret,
        "js_code": code,
        "grant_type": "authorization_code",
    }

    async with httpx.AsyncClient(timeout=10.0) as client:
        response = await client.get(CODE2SESSION_URL, params=query)
        response.raise_for_status()
        payload = response.json()

    # WeChat signals failure via a non-zero errcode in the JSON body.
    error_code = payload.get("errcode", 0)
    if error_code != 0:
        error_msg = payload.get("errmsg", "unknown error")
        logger.warning("微信 code2Session 失败: errcode=%s, errmsg=%s", error_code, error_msg)
        raise WeChatAuthError(error_code, error_msg)

    return {
        "openid": payload["openid"],
        "session_key": payload["session_key"],
        "unionid": payload.get("unionid"),
    }
|
||||
6
apps/backend/auth_only.txt
Normal file
6
apps/backend/auth_only.txt
Normal file
@@ -0,0 +1,6 @@
|
||||
..................F........... [100%]
|
||||
================================== FAILURES ===================================
|
||||
__________________ test_invalid_credentials_always_rejected ___________________
|
||||
+ Exception Group Traceback (most recent call last):\n | File "C:\\ProgramData\\miniconda3\\Lib\\unittest\\mock.py", line 1422, in test_invalid_credentials_always_rejected\n | def patched(*args, **keywargs):\n | ^^^^^^^^^^^^^^^^^^^^\n | File "C:\\NeoZQYY\\.venv\\Lib\\site-packages\\hypothesis\\core.py", line 2246, in wrapped_test\n | raise the_error_hypothesis_found\n | hypothesis.errors.FlakyFailure: Hypothesis test_invalid_credentials_always_rejected(password='\u4d77\U0002325c\u0133', username='uv') produces unreliable results: Falsified on the first call but did not on a subsequent one (1 sub-exception)\n | Falsifying example: test_invalid_credentials_always_rejected(\n | username='uv',\n | password='\u4d77\U0002325c\u0133',\n | )\n | Unreliable test timings! On an initial run, this test took 285.80ms, which exceeded the deadline of 200.00ms, but on a subsequent run it took 5.67 ms, which did not. If you expect this sort of variability in your test timings, consider turning deadlines off for this test by setting deadline=None.\n +-+---------------- 1 ----------------\n | Traceback (most recent call last):\n | File "C:\\NeoZQYY\\.venv\\Lib\\site-packages\\hypothesis\\core.py", line 1211, in _execute_once_for_engine\n | result = self.execute_once(data)\n | File "C:\\NeoZQYY\\.venv\\Lib\\site-packages\\hypothesis\\core.py", line 1150, in execute_once\n | result = self.test_runner(data, run)\n | File "C:\\NeoZQYY\\.venv\\Lib\\site-packages\\hypothesis\\core.py", line 824, in default_executor\n | return function(data)\n | File "C:\\NeoZQYY\\.venv\\Lib\\site-packages\\hypothesis\\core.py", line 1107, in run\n | return test(*args, **kwargs)\n | File "C:\\ProgramData\\miniconda3\\Lib\\unittest\\mock.py", line 1422, in test_invalid_credentials_always_rejected\n | def patched(*args, **keywargs):\n | ^^^^^^^^^^^^^^^^^^^^\n | File "C:\\NeoZQYY\\.venv\\Lib\\site-packages\\hypothesis\\core.py", line 1041, in test\n | raise DeadlineExceeded(\n | 
datetime.timedelta(seconds=runtime), self.settings.deadline\n | )\n | hypothesis.errors.DeadlineExceeded: Test took 285.80ms, which exceeds the deadline of 200.00ms. If you expect test cases to take this long, you can use @settings(deadline=...) to either set a higher deadline, or to disable it with deadline=None.\n +------------------------------------\n=========================== short test summary info ===========================
|
||||
FAILED tests/test_auth_properties.py::test_invalid_credentials_always_rejected
|
||||
1 failed, 29 passed in 11.59s
|
||||
42
apps/backend/auth_only_results.txt
Normal file
42
apps/backend/auth_only_results.txt
Normal file
@@ -0,0 +1,42 @@
|
||||
============================= test session starts =============================
|
||||
platform win32 -- Python 3.13.9, pytest-9.0.2, pluggy-1.6.0 -- C:\NeoZQYY\.venv\Scripts\python.exe
|
||||
cachedir: .pytest_cache
|
||||
hypothesis profile 'default'
|
||||
rootdir: C:\NeoZQYY\apps\backend
|
||||
configfile: pyproject.toml
|
||||
plugins: anyio-4.12.1, hypothesis-6.151.6, asyncio-1.3.0
|
||||
asyncio: mode=Mode.STRICT, debug=False, asyncio_default_fixture_loop_scope=None, asyncio_default_test_loop_scope=function
|
||||
collecting ... collected 30 items
|
||||
|
||||
tests/test_auth_jwt.py::TestPasswordHashing::test_hash_and_verify PASSED [ 3%]
|
||||
tests/test_auth_jwt.py::TestPasswordHashing::test_wrong_password_rejected PASSED [ 6%]
|
||||
tests/test_auth_jwt.py::TestPasswordHashing::test_hash_is_not_plaintext PASSED [ 10%]
|
||||
tests/test_auth_jwt.py::TestTokenCreation::test_access_token_contains_expected_fields PASSED [ 13%]
|
||||
tests/test_auth_jwt.py::TestTokenCreation::test_refresh_token_contains_expected_fields PASSED [ 16%]
|
||||
tests/test_auth_jwt.py::TestTokenCreation::test_token_pair_returns_both_tokens PASSED [ 20%]
|
||||
tests/test_auth_jwt.py::TestTokenTypeValidation::test_decode_access_token_rejects_refresh PASSED [ 23%]
|
||||
tests/test_auth_jwt.py::TestTokenTypeValidation::test_decode_refresh_token_rejects_access PASSED [ 26%]
|
||||
tests/test_auth_jwt.py::TestTokenTypeValidation::test_decode_access_token_accepts_access PASSED [ 30%]
|
||||
tests/test_auth_jwt.py::TestTokenTypeValidation::test_decode_refresh_token_accepts_refresh PASSED [ 33%]
|
||||
tests/test_auth_jwt.py::TestTokenExpiry::test_expired_token_rejected PASSED [ 36%]
|
||||
tests/test_auth_jwt.py::TestInvalidToken::test_garbage_token_rejected PASSED [ 40%]
|
||||
tests/test_auth_jwt.py::TestInvalidToken::test_wrong_secret_rejected PASSED [ 43%]
|
||||
tests/test_auth_dependencies.py::TestGetCurrentUser::test_valid_access_token PASSED [ 46%]
|
||||
tests/test_auth_dependencies.py::TestGetCurrentUser::test_missing_auth_header_returns_401 PASSED [ 50%]
|
||||
tests/test_auth_dependencies.py::TestGetCurrentUser::test_invalid_token_returns_401 PASSED [ 53%]
|
||||
tests/test_auth_dependencies.py::TestGetCurrentUser::test_refresh_token_rejected PASSED [ 56%]
|
||||
tests/test_auth_dependencies.py::TestGetCurrentUser::test_current_user_is_frozen_dataclass PASSED [ 60%]
|
||||
tests/test_auth_properties.py::test_invalid_credentials_always_rejected PASSED [ 63%]
|
||||
tests/test_auth_properties.py::test_valid_jwt_grants_access PASSED [ 66%]
|
||||
tests/test_auth_router.py::TestLogin::test_login_success PASSED [ 70%]
|
||||
tests/test_auth_router.py::TestLogin::test_login_user_not_found PASSED [ 73%]
|
||||
tests/test_auth_router.py::TestLogin::test_login_wrong_password PASSED [ 76%]
|
||||
tests/test_auth_router.py::TestLogin::test_login_disabled_account PASSED [ 80%]
|
||||
tests/test_auth_router.py::TestLogin::test_login_missing_username PASSED [ 83%]
|
||||
tests/test_auth_router.py::TestLogin::test_login_empty_password PASSED [ 86%]
|
||||
tests/test_auth_router.py::TestRefresh::test_refresh_success PASSED [ 90%]
|
||||
tests/test_auth_router.py::TestRefresh::test_refresh_with_invalid_token PASSED [ 93%]
|
||||
tests/test_auth_router.py::TestRefresh::test_refresh_with_access_token_rejected PASSED [ 96%]
|
||||
tests/test_auth_router.py::TestRefresh::test_refresh_missing_token PASSED [100%]
|
||||
|
||||
============================= 30 passed in 9.92s ==============================
|
||||
42
apps/backend/auth_test_results.txt
Normal file
42
apps/backend/auth_test_results.txt
Normal file
@@ -0,0 +1,42 @@
|
||||
============================= test session starts =============================
|
||||
platform win32 -- Python 3.13.9, pytest-9.0.2, pluggy-1.6.0 -- C:\NeoZQYY\.venv\Scripts\python.exe
|
||||
cachedir: .pytest_cache
|
||||
hypothesis profile 'default'
|
||||
rootdir: C:\NeoZQYY\apps\backend
|
||||
configfile: pyproject.toml
|
||||
plugins: anyio-4.12.1, hypothesis-6.151.6, asyncio-1.3.0
|
||||
asyncio: mode=Mode.STRICT, debug=False, asyncio_default_fixture_loop_scope=None, asyncio_default_test_loop_scope=function
|
||||
collecting ... collected 30 items
|
||||
|
||||
tests/test_auth_jwt.py::TestPasswordHashing::test_hash_and_verify PASSED [ 3%]
|
||||
tests/test_auth_jwt.py::TestPasswordHashing::test_wrong_password_rejected PASSED [ 6%]
|
||||
tests/test_auth_jwt.py::TestPasswordHashing::test_hash_is_not_plaintext PASSED [ 10%]
|
||||
tests/test_auth_jwt.py::TestTokenCreation::test_access_token_contains_expected_fields PASSED [ 13%]
|
||||
tests/test_auth_jwt.py::TestTokenCreation::test_refresh_token_contains_expected_fields PASSED [ 16%]
|
||||
tests/test_auth_jwt.py::TestTokenCreation::test_token_pair_returns_both_tokens PASSED [ 20%]
|
||||
tests/test_auth_jwt.py::TestTokenTypeValidation::test_decode_access_token_rejects_refresh PASSED [ 23%]
|
||||
tests/test_auth_jwt.py::TestTokenTypeValidation::test_decode_refresh_token_rejects_access PASSED [ 26%]
|
||||
tests/test_auth_jwt.py::TestTokenTypeValidation::test_decode_access_token_accepts_access PASSED [ 30%]
|
||||
tests/test_auth_jwt.py::TestTokenTypeValidation::test_decode_refresh_token_accepts_refresh PASSED [ 33%]
|
||||
tests/test_auth_jwt.py::TestTokenExpiry::test_expired_token_rejected PASSED [ 36%]
|
||||
tests/test_auth_jwt.py::TestInvalidToken::test_garbage_token_rejected PASSED [ 40%]
|
||||
tests/test_auth_jwt.py::TestInvalidToken::test_wrong_secret_rejected PASSED [ 43%]
|
||||
tests/test_auth_dependencies.py::TestGetCurrentUser::test_valid_access_token PASSED [ 46%]
|
||||
tests/test_auth_dependencies.py::TestGetCurrentUser::test_missing_auth_header_returns_401 PASSED [ 50%]
|
||||
tests/test_auth_dependencies.py::TestGetCurrentUser::test_invalid_token_returns_401 PASSED [ 53%]
|
||||
tests/test_auth_dependencies.py::TestGetCurrentUser::test_refresh_token_rejected PASSED [ 56%]
|
||||
tests/test_auth_dependencies.py::TestGetCurrentUser::test_current_user_is_frozen_dataclass PASSED [ 60%]
|
||||
tests/test_auth_properties.py::test_invalid_credentials_always_rejected PASSED [ 63%]
|
||||
tests/test_auth_properties.py::test_valid_jwt_grants_access PASSED [ 66%]
|
||||
tests/test_auth_router.py::TestLogin::test_login_success PASSED [ 70%]
|
||||
tests/test_auth_router.py::TestLogin::test_login_user_not_found PASSED [ 73%]
|
||||
tests/test_auth_router.py::TestLogin::test_login_wrong_password PASSED [ 76%]
|
||||
tests/test_auth_router.py::TestLogin::test_login_disabled_account PASSED [ 80%]
|
||||
tests/test_auth_router.py::TestLogin::test_login_missing_username PASSED [ 83%]
|
||||
tests/test_auth_router.py::TestLogin::test_login_empty_password PASSED [ 86%]
|
||||
tests/test_auth_router.py::TestRefresh::test_refresh_success PASSED [ 90%]
|
||||
tests/test_auth_router.py::TestRefresh::test_refresh_with_invalid_token PASSED [ 93%]
|
||||
tests/test_auth_router.py::TestRefresh::test_refresh_with_access_token_rejected PASSED [ 96%]
|
||||
tests/test_auth_router.py::TestRefresh::test_refresh_missing_token PASSED [100%]
|
||||
|
||||
============================= 30 passed in 10.76s =============================
|
||||
349
apps/backend/docs/API-REFERENCE.md
Normal file
349
apps/backend/docs/API-REFERENCE.md
Normal file
@@ -0,0 +1,349 @@
|
||||
# API 参考手册
|
||||
|
||||
后端 API 基于 FastAPI 构建,所有端点均以 `/api/` 为前缀。
|
||||
在线文档:启动后访问 `http://localhost:8000/docs`(Swagger UI)或 `/redoc`(ReDoc)。
|
||||
|
||||
---
|
||||
|
||||
## 1. 管理后台认证 `/api/auth`
|
||||
|
||||
### POST `/api/auth/login`
|
||||
管理后台用户名密码登录。
|
||||
|
||||
请求体:
|
||||
```json
|
||||
{ "username": "admin", "password": "..." }
|
||||
```
|
||||
响应:
|
||||
```json
|
||||
{ "access_token": "...", "refresh_token": "...", "token_type": "bearer" }
|
||||
```
|
||||
|
||||
### POST `/api/auth/refresh`
|
||||
刷新访问令牌。
|
||||
|
||||
请求体:
|
||||
```json
|
||||
{ "refresh_token": "..." }
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## 2. 小程序认证 `/api/xcx-auth`
|
||||
|
||||
小程序用户的完整生命周期:微信登录 → 提交申请 → 管理员审批 → 正式使用。
|
||||
|
||||
### POST `/api/xcx-auth/login`
|
||||
微信登录。用 `wx.login()` 获取的 code 换取 JWT。
|
||||
|
||||
请求体:
|
||||
```json
|
||||
{ "code": "微信临时登录凭证" }
|
||||
```
|
||||
响应:
|
||||
```json
|
||||
{
|
||||
"access_token": "...",
|
||||
"refresh_token": "...",
|
||||
"token_type": "bearer",
|
||||
"user_status": "pending | approved | rejected | disabled",
|
||||
"user_id": 1
|
||||
}
|
||||
```
|
||||
说明:
|
||||
- 首次登录自动创建 `auth.users` 记录(status=pending)
|
||||
- pending 用户获得受限令牌(`limited=True`),仅可访问申请相关端点
|
||||
- approved 用户获得完整令牌,包含 `site_id` 和 `roles`
|
||||
|
||||
### POST `/api/xcx-auth/apply`
|
||||
提交入驻申请。需受限令牌或完整令牌。
|
||||
|
||||
请求体:
|
||||
```json
|
||||
{
|
||||
"site_code": "AB123",
|
||||
"applied_role_text": "助教",
|
||||
"phone": "13800138000",
|
||||
"employee_number": "E001",
|
||||
"nickname": "张三"
|
||||
}
|
||||
```
|
||||
说明:
|
||||
- `site_code` 格式:2 字母 + 3 数字(如 `AB123`),映射到 `auth.site_code_mapping`
|
||||
- 后端自动进行人员匹配(`matching.py`),在 ETL 库中查找助教/员工记录
|
||||
|
||||
### GET `/api/xcx-auth/status`
|
||||
查询当前用户状态和申请记录。需受限令牌或完整令牌。
|
||||
|
||||
响应:
|
||||
```json
|
||||
{
|
||||
"user_id": 1,
|
||||
"status": "approved",
|
||||
"nickname": "张三",
|
||||
"applications": [
|
||||
{
|
||||
"id": 1,
|
||||
"site_code": "AB123",
|
||||
"applied_role_text": "助教",
|
||||
"status": "approved",
|
||||
"review_note": null,
|
||||
"created_at": "2026-02-25T10:00:00",
|
||||
"reviewed_at": "2026-02-25T11:00:00"
|
||||
}
|
||||
]
|
||||
}
|
||||
```
|
||||
|
||||
### GET `/api/xcx-auth/sites`
|
||||
获取当前用户关联的门店列表。需完整令牌。
|
||||
|
||||
### POST `/api/xcx-auth/switch-site`
|
||||
切换当前门店,返回新的令牌对。需完整令牌。
|
||||
|
||||
请求体:
|
||||
```json
|
||||
{ "site_id": 2 }
|
||||
```
|
||||
|
||||
### POST `/api/xcx-auth/refresh`
|
||||
刷新令牌。
|
||||
|
||||
请求体:
|
||||
```json
|
||||
{ "refresh_token": "..." }
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## 3. 任务配置 `/api/tasks`
|
||||
|
||||
所有端点需 JWT 认证。
|
||||
|
||||
### GET `/api/tasks/registry`
|
||||
按业务域分组的 ETL 任务列表。
|
||||
|
||||
响应示例:
|
||||
```json
|
||||
{
|
||||
"groups": {
|
||||
"会员": [
|
||||
{
|
||||
"code": "DWD_LOAD_FROM_ODS",
|
||||
"name": "ODS → DWD 加载",
|
||||
"domain": "会员",
|
||||
"layer": "DWD",
|
||||
"requires_window": true,
|
||||
"is_ods": false,
|
||||
"is_dimension": false,
|
||||
"default_enabled": true,
|
||||
"is_common": true
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
### GET `/api/tasks/dwd-tables`
|
||||
按业务域分组的 DWD 表定义。
|
||||
|
||||
### GET `/api/tasks/flows`
|
||||
返回 7 种 Flow 定义和 4 种处理模式。
|
||||
|
||||
Flow 列表:
|
||||
| ID | 名称 | 层级 |
|
||||
|----|------|------|
|
||||
| `api_ods` | API → ODS | ODS |
|
||||
| `api_ods_dwd` | API → ODS → DWD | ODS, DWD |
|
||||
| `api_full` | API → ODS → DWD → DWS → INDEX | ODS, DWD, DWS, INDEX |
|
||||
| `ods_dwd` | ODS → DWD | DWD |
|
||||
| `dwd_dws` | DWD → DWS汇总 | DWS |
|
||||
| `dwd_dws_index` | DWD → DWS → INDEX | DWS, INDEX |
|
||||
| `dwd_index` | DWD → DWS指数 | INDEX |
|
||||
|
||||
处理模式:
|
||||
| ID | 名称 | 说明 |
|
||||
|----|------|------|
|
||||
| `increment_only` | 仅增量处理 | 只处理新增和变更的数据 |
|
||||
| `verify_only` | 仅校验修复 | 校验现有数据并修复不一致 |
|
||||
| `increment_verify` | 增量 + 校验修复 | 先增量处理,再校验并修复 |
|
||||
| `full_window` | 全窗口处理 | 用 API 返回数据的实际时间范围处理全部层 |
|
||||
|
||||
### POST `/api/tasks/validate`
|
||||
验证任务配置并返回 CLI 命令预览。`store_id` 从 JWT 自动注入。
|
||||
|
||||
### GET `/api/tasks/sync-check`
|
||||
对比后端硬编码任务列表与 ETL 真实注册表,返回差异。
|
||||
|
||||
---
|
||||
|
||||
## 4. 任务执行 `/api/execution`
|
||||
|
||||
所有端点需 JWT 认证,`site_id` 从 JWT 提取。
|
||||
|
||||
### POST `/api/execution/run`
|
||||
直接执行任务(不经过队列)。异步启动 ETL CLI 子进程。
|
||||
|
||||
请求体:`TaskConfigSchema`(flow、tasks、window 等)
|
||||
|
||||
响应:
|
||||
```json
|
||||
{ "execution_id": "uuid", "message": "任务已提交执行" }
|
||||
```
|
||||
|
||||
### GET `/api/execution/queue`
|
||||
获取当前门店的待执行队列。
|
||||
|
||||
### POST `/api/execution/queue`
|
||||
将任务配置添加到执行队列。
|
||||
|
||||
### PUT `/api/execution/queue/reorder`
|
||||
调整队列中任务的执行顺序。
|
||||
|
||||
### DELETE `/api/execution/queue/{task_id}`
|
||||
从队列中删除待执行任务(仅 pending 状态)。
|
||||
|
||||
### POST `/api/execution/{execution_id}/cancel`
|
||||
取消正在执行的任务。
|
||||
|
||||
### GET `/api/execution/history`
|
||||
执行历史记录(按 `started_at` 降序,默认 50 条,最多 200 条)。
|
||||
|
||||
### GET `/api/execution/{execution_id}/logs`
|
||||
获取指定执行的完整日志。优先从内存缓冲区读取(执行中),否则从数据库读取(已完成)。
|
||||
|
||||
---
|
||||
|
||||
## 5. 调度管理 `/api/schedules`
|
||||
|
||||
所有端点需 JWT 认证。
|
||||
|
||||
### GET `/api/schedules`
|
||||
列出当前门店的所有调度任务。
|
||||
|
||||
### POST `/api/schedules`
|
||||
创建调度任务,自动计算 `next_run_at`。
|
||||
|
||||
### PUT `/api/schedules/{schedule_id}`
|
||||
更新调度任务(部分更新,仅更新请求中提供的字段)。
|
||||
|
||||
### DELETE `/api/schedules/{schedule_id}`
|
||||
删除调度任务。
|
||||
|
||||
### PATCH `/api/schedules/{schedule_id}/toggle`
|
||||
切换启用/禁用状态。禁用时 `next_run_at` 置 NULL;启用时重新计算。
|
||||
|
||||
---
|
||||
|
||||
## 6. 数据库查看器 `/api/db`
|
||||
|
||||
所有端点需 JWT 认证。使用 ETL 只读连接 + RLS 门店隔离。
|
||||
|
||||
### GET `/api/db/schemas`
|
||||
返回 ETL 数据库中的 Schema 列表。
|
||||
|
||||
### GET `/api/db/schemas/{name}/tables`
|
||||
返回指定 Schema 下所有表的名称和行数统计。
|
||||
|
||||
### GET `/api/db/tables/{schema}/{table}/columns`
|
||||
返回指定表的列定义(列名、数据类型、是否可空、默认值)。
|
||||
|
||||
### POST `/api/db/query`
|
||||
只读 SQL 执行。
|
||||
|
||||
安全措施:
|
||||
- 拦截写操作关键词(INSERT / UPDATE / DELETE / DROP / TRUNCATE)
|
||||
- 返回行数上限 1000 行
|
||||
- 查询超时 30 秒
|
||||
- 连接级 `read_only` 保护
|
||||
|
||||
请求体:
|
||||
```json
|
||||
{ "sql": "SELECT * FROM dwd.member_info LIMIT 10" }
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## 7. ETL 状态 `/api/etl-status`
|
||||
|
||||
### GET `/api/etl-status/cursors`
|
||||
返回各 ODS 表的最新数据游标(查询 `meta.etl_cursor`)。
|
||||
|
||||
### GET `/api/etl-status/recent-runs`
|
||||
返回最近 50 条任务执行记录。
|
||||
|
||||
---
|
||||
|
||||
## 8. 环境配置 `/api/env-config`
|
||||
|
||||
### GET `/api/env-config`
|
||||
读取根 `.env` 文件内容(敏感值脱敏显示)。
|
||||
|
||||
### PUT `/api/env-config`
|
||||
更新 `.env` 文件中的配置项。
|
||||
|
||||
---
|
||||
|
||||
## 9. 运维面板 `/api/ops`
|
||||
|
||||
### GET `/api/ops/system`
|
||||
服务器系统资源概况(CPU、内存、磁盘、启动时间)。
|
||||
|
||||
### GET `/api/ops/services`
|
||||
所有环境(test/prod)的服务运行状态(PID、端口、内存、CPU、运行时长)。
|
||||
|
||||
### POST `/api/ops/services/{env}/start`
|
||||
启动指定环境的后端服务。
|
||||
|
||||
### POST `/api/ops/services/{env}/stop`
|
||||
停止指定环境的后端服务。
|
||||
|
||||
### POST `/api/ops/services/{env}/restart`
|
||||
重启指定环境的后端服务。
|
||||
|
||||
### GET `/api/ops/git`
|
||||
所有环境的 Git 状态(分支、最新提交、是否有本地修改)。
|
||||
|
||||
### POST `/api/ops/git/{env}/pull`
|
||||
对指定环境执行 `git pull --ff-only`。
|
||||
|
||||
### POST `/api/ops/git/{env}/sync-deps`
|
||||
对指定环境执行 `uv sync --all-packages`。
|
||||
|
||||
### GET `/api/ops/env-file/{env}`
|
||||
读取指定环境的 `.env` 文件(敏感值脱敏)。
|
||||
|
||||
---
|
||||
|
||||
## 10. 其他端点
|
||||
|
||||
### GET `/health`
|
||||
健康检查。返回 `{"status": "ok"}`。
|
||||
|
||||
### GET `/api/xcx-test`
|
||||
MVP 全链路验证端点,从 `test."xcx-test"` 表读取数据。
|
||||
|
||||
### GET/POST `/api/wx-callback`
|
||||
微信消息推送回调。GET 用于签名验证,POST 用于接收消息。
|
||||
|
||||
### POST `/api/member-birthday`
|
||||
助教手动补录会员生日。
|
||||
|
||||
### WebSocket `/ws/logs/{execution_id}`
|
||||
实时日志推送。连接后自动接收指定执行的日志流。
|
||||
|
||||
---
|
||||
|
||||
## 错误码约定
|
||||
|
||||
| HTTP 状态码 | 含义 |
|
||||
|-------------|------|
|
||||
| 200 | 成功 |
|
||||
| 201 | 创建成功 |
|
||||
| 400 | 请求参数错误 / SQL 执行错误 |
|
||||
| 401 | 未认证 / 令牌无效 / 受限令牌 |
|
||||
| 404 | 资源不存在 |
|
||||
| 408 | 查询超时 |
|
||||
| 409 | 状态冲突(如删除非 pending 任务) |
|
||||
| 422 | 请求体验证失败 |
|
||||
| 500 | 服务器内部错误 |
|
||||
94
apps/backend/test_results.txt
Normal file
94
apps/backend/test_results.txt
Normal file
@@ -0,0 +1,94 @@
|
||||
..................................................................F..... [ 23%]
|
||||
........................................................................ [ 47%]
|
||||
........................................................................ [ 71%]
|
||||
....F..FFFFFFFF.F...........................................F..F........ [ 95%]
|
||||
............... [100%]
|
||||
================================== FAILURES ===================================
|
||||
__________ TestGetEtlReadonlyConnection.test_uses_etl_config_params ___________
|
||||
tests\test_database.py:94: in test_uses_etl_config_params
|
||||
assert connect_kwargs["dbname"] == "etl_feiqiu"
|
||||
E AssertionError: assert 'test_etl_feiqiu' == 'etl_feiqiu'
|
||||
E
|
||||
E - etl_feiqiu
|
||||
E + test_etl_feiqiu
|
||||
E ? +++++
|
||||
__________ TestRunningState.test_is_running_true_when_process_active __________
|
||||
tests\test_task_executor.py:118: in test_is_running_true_when_process_active
|
||||
assert executor.is_running("exec-1") is True
|
||||
E AssertionError: assert False is True
|
||||
E + where False = is_running('exec-1')
|
||||
E + where is_running = <app.services.task_executor.TaskExecutor object at 0x000001C9442CBD40>.is_running
|
||||
____________________ TestReadStream.test_read_stdout_lines ____________________
|
||||
tests\test_task_executor.py:146: in test_read_stdout_lines
|
||||
await executor._read_stream("exec-1", stream, "stdout", collector)
|
||||
^^^^^^^^^^^^^^^^^^^^^
|
||||
E AttributeError: 'TaskExecutor' object has no attribute '_read_stream'
|
||||
____________________ TestReadStream.test_read_stderr_lines ____________________
|
||||
tests\test_task_executor.py:158: in test_read_stderr_lines
|
||||
await executor._read_stream("exec-1", stream, "stderr", collector)
|
||||
^^^^^^^^^^^^^^^^^^^^^
|
||||
E AttributeError: 'TaskExecutor' object has no attribute '_read_stream'
|
||||
________________ TestReadStream.test_read_stream_none_is_safe _________________
|
||||
tests\test_task_executor.py:166: in test_read_stream_none_is_safe
|
||||
await executor._read_stream("exec-1", None, "stdout", collector)
|
||||
^^^^^^^^^^^^^^^^^^^^^
|
||||
E AttributeError: 'TaskExecutor' object has no attribute '_read_stream'
|
||||
__________________ TestReadStream.test_broadcast_during_read __________________
|
||||
tests\test_task_executor.py:175: in test_broadcast_during_read
|
||||
await executor._read_stream("exec-1", stream, "stdout", collector)
|
||||
^^^^^^^^^^^^^^^^^^^^^
|
||||
E AttributeError: 'TaskExecutor' object has no attribute '_read_stream'
|
||||
____________________ TestExecute.test_successful_execution ____________________
|
||||
tests\test_task_executor.py:218: in test_successful_execution
|
||||
assert "processing..." in update_kwargs["output_log"]
|
||||
E AssertionError: assert 'processing...' in ''
|
||||
______________________ TestExecute.test_failed_execution ______________________
|
||||
tests\test_task_executor.py:245: in test_failed_execution
|
||||
assert update_kwargs["status"] == "failed"
|
||||
E AssertionError: assert 'success' == 'failed'
|
||||
E
|
||||
E - failed
|
||||
E + success
|
||||
_________________ TestExecute.test_exception_during_execution _________________
|
||||
tests\test_task_executor.py:263: in test_exception_during_execution
|
||||
assert update_kwargs["status"] == "failed"
|
||||
E AssertionError: assert 'success' == 'failed'
|
||||
E
|
||||
E - failed
|
||||
E + success
|
||||
_____________ TestExecute.test_subscribers_notified_on_completion _____________
|
||||
tests\test_task_executor.py:290: in test_subscribers_notified_on_completion
|
||||
assert "[stdout] line" in messages
|
||||
E AssertionError: assert '[stdout] line' in ['[stderr] [2026-02-25 02:42:10] INFO | etl_billiards | 配置加载完成', '[stderr] [2026-02-25 02:42:10] INFO | etl_billiard... 02:42:11] INFO | etl_billiards | 使用回溯时间窗口: 2026-02-24 02:42:11.615145+08:00 ~ 2026-02-25 02:42:11.615145+08:00', ...]
|
||||
___________________ TestCancel.test_cancel_running_process ____________________
|
||||
tests\test_task_executor.py:331: in test_cancel_running_process
|
||||
assert result is True
|
||||
E assert False is True
|
||||
_____________ TestFlows.test_flows_returns_three_processing_modes _____________
|
||||
tests\test_tasks_router.py:124: in test_flows_returns_three_processing_modes
|
||||
assert len(data["processing_modes"]) == 3
|
||||
E AssertionError: assert 4 == 3
|
||||
E + where 4 = len([{'description': '只处理新增和变更的数据', 'id': 'increment_only', 'name': '仅增量处理'}, {'description': '校验现有数据并修复不一致', 'id': 'verif...nt_verify', 'name': '增量 + 校验修复'}, {'description': '用 API 返回数据的实际时间范围处理全部层,无需校验', 'id': 'full_window', 'name': '全窗口处理'}])
|
||||
_____________________ TestFlows.test_processing_mode_ids ______________________
|
||||
tests\test_tasks_router.py:143: in test_processing_mode_ids
|
||||
assert mode_ids == {"increment_only", "verify_only", "increment_verify"}
|
||||
E AssertionError: assert {'full_window...'verify_only'} == {'increment_o...'verify_only'}
|
||||
E
|
||||
E Extra items in the left set:
|
||||
E 'full_window'
|
||||
E Use -v to get more diff
|
||||
=========================== short test summary info ===========================
|
||||
FAILED tests/test_database.py::TestGetEtlReadonlyConnection::test_uses_etl_config_params
|
||||
FAILED tests/test_task_executor.py::TestRunningState::test_is_running_true_when_process_active
|
||||
FAILED tests/test_task_executor.py::TestReadStream::test_read_stdout_lines - ...
|
||||
FAILED tests/test_task_executor.py::TestReadStream::test_read_stderr_lines - ...
|
||||
FAILED tests/test_task_executor.py::TestReadStream::test_read_stream_none_is_safe
|
||||
FAILED tests/test_task_executor.py::TestReadStream::test_broadcast_during_read
|
||||
FAILED tests/test_task_executor.py::TestExecute::test_successful_execution - ...
|
||||
FAILED tests/test_task_executor.py::TestExecute::test_failed_execution - Asse...
|
||||
FAILED tests/test_task_executor.py::TestExecute::test_exception_during_execution
|
||||
FAILED tests/test_task_executor.py::TestExecute::test_subscribers_notified_on_completion
|
||||
FAILED tests/test_task_executor.py::TestCancel::test_cancel_running_process
|
||||
FAILED tests/test_tasks_router.py::TestFlows::test_flows_returns_three_processing_modes
|
||||
FAILED tests/test_tasks_router.py::TestFlows::test_processing_mode_ids - Asse...
|
||||
13 failed, 290 passed in 146.30s (0:02:26)
|
||||
@@ -5,6 +5,26 @@
|
||||
|
||||
---
|
||||
|
||||
## 2026-02-26
|
||||
|
||||
### 文档审计 — 任务统计、业务口径、SCD2 规则全面校正
|
||||
|
||||
- **摘要**:对 `docs/` 下 9 个文档进行系统性审计与修正,以 `task_registry.py` 为唯一事实来源,修复任务数量、域分组、表名、工具链引用等 15 处不一致
|
||||
- **修正清单**:
|
||||
- `docs/README.md`:移除不存在的 `audit/`、`reports/` 目录引用
|
||||
- `docs/etl_tasks/README.md`:ODS 22→23、DWS 13→17、补充 `ODS_STAFF_INFO`/`DWS_ASSISTANT_ORDER_CONTRIBUTION`/库存汇总域/SPI
|
||||
- `docs/etl_tasks/ods_tasks.md`:任务总数 22→23
|
||||
- `docs/etl_tasks/dws_tasks.md`:任务总数 14→17、域分组四组→五组、补充库存汇总域概述表格
|
||||
- `docs/etl_tasks/base_task_mechanism.md`:DWS 13→17、INDEX 4→5
|
||||
- `docs/operations/environment_setup.md`:移除重复段落
|
||||
- `docs/operations/troubleshooting.md`:`pip install -r requirements.txt` → `uv sync`
|
||||
- `docs/business-rules/dws_metrics.md`:完全重写,移除不存在的表、修正表名、补充库存汇总域和指数算法章节
|
||||
- `docs/business-rules/scd2_rules.md`:完全重写,填充 9 个维度表的实际跟踪字段、补充 `dim_staff` 维度、文档化变更检测机制
|
||||
- **影响范围**:文档(`docs/` 全目录,无代码变更)
|
||||
- **风险**:极低(纯文档修正)
|
||||
|
||||
---
|
||||
|
||||
## 2026-02-19
|
||||
|
||||
### 文档全面刷新 — Schema 名称、技术栈、任务统计同步至项目现状
|
||||
|
||||
@@ -6,8 +6,6 @@
|
||||
|-------------|------|
|
||||
| [`architecture/`](architecture/README.md) | 架构设计文档 — 系统整体架构、数据流向(ODS→DWD→DWS)、模块交互关系 |
|
||||
| [`api-reference/`](api-reference/) | API 参考文档(25 个端点的标准化文档 + JSON 样本) |
|
||||
| [`audit/`](audit/README.md) | 审计目录(历史归档,新记录已迁移至根 `docs/audit/`) |
|
||||
| [`audit/repo/`](audit/repo/) | 仓库审计报告(由 `scripts/audit/` 自动生成:文件清单、调用流、文档对齐) |
|
||||
| [`business-rules/`](business-rules/README.md) | 业务规则文档 — 指数算法、DWS 口径定义、SCD2 处理规则等业务逻辑 |
|
||||
| [`database/`](database/README.md) | 数据库文档统一目录 — 层级概览 + ODS/DWD/DWS/ETL_Admin 表级文档 |
|
||||
| [`database/overview/`](database/overview/) | 层级概览 / 速查索引(表清单、主键、记录数、业务域分类) |
|
||||
@@ -17,8 +15,6 @@
|
||||
| [`database/ETL_Admin/`](database/ETL_Admin/) | ETL 管理层表手册(etl_cursor/etl_run/etl_task) |
|
||||
| [`etl_tasks/`](etl_tasks/README.md) | ETL 任务文档(ODS/DWD/DWS/指数任务说明与机制) |
|
||||
| [`operations/`](operations/README.md) | 运维文档 — 环境搭建指南、调度配置说明、故障排查手册 |
|
||||
| [`reports/`](reports/) | 分析报告(数据质量、一致性检查等输出) |
|
||||
| [`requirements/`](requirements/) | 需求文档(功能需求、口径补充、指数 PRD 等) |
|
||||
| [`CHANGELOG.md`](CHANGELOG.md) | 项目级版本变更历史(日期、变更摘要、影响范围) |
|
||||
|
||||
## 维护约定
|
||||
|
||||
@@ -3,7 +3,7 @@
|
||||
本文档定义 `dws` 模式下各汇总指标的业务口径、计算规则和数据来源。
|
||||
所有指标均基于 DWD 明细层数据聚合生成。
|
||||
|
||||
> **状态**:骨架文档,各章节待补充具体计算公式与字段映射。
|
||||
> 各任务的详细实现(数据来源表、输出字段、核心逻辑)请参阅 [dws_tasks.md](../etl_tasks/dws_tasks.md)。
|
||||
|
||||
---
|
||||
|
||||
@@ -11,39 +11,51 @@
|
||||
|
||||
### 1.1 助教日报(dws_assistant_daily_detail)
|
||||
|
||||
<!-- 待补充:日维度助教业绩指标定义,包括订单数、服务时长、收入等 -->
|
||||
|
||||
- 目标表:`dws.dws_assistant_daily_detail`
|
||||
- 数据来源:DWD 订单事实表、助教维度表
|
||||
- 数据来源:`dwd_assistant_service_log`、`dwd_assistant_trash_event`、`dim_assistant`(SCD2)
|
||||
- 粒度:门店 × 助教 × 日期
|
||||
- 核心指标:*(待定义)*
|
||||
- 核心指标:服务次数(总/基础课/附加课/包厢课)、计费秒数与小时数、台账金额、去重客户数与台桌数、废除统计
|
||||
- 课程类型分类:通过 `cfg_skill_type` 映射 `skill_id` → `BASE`/`BONUS`/`ROOM`
|
||||
- 助教等级:SCD2 as-of 取值,取统计日当日生效的等级版本
|
||||
|
||||
### 1.2 助教月报(dws_assistant_monthly_summary)
|
||||
|
||||
<!-- 待补充:月维度助教业绩汇总指标 -->
|
||||
|
||||
- 目标表:`dws.dws_assistant_monthly_summary`
|
||||
- 数据来源:助教日报聚合
|
||||
- 数据来源:`dws_assistant_daily_detail` 聚合 + `dwd_assistant_service_log`(月度去重)+ `dim_assistant` + `cfg_performance_tier`
|
||||
- 粒度:门店 × 助教 × 年月
|
||||
- 核心指标:*(待定义)*
|
||||
- 核心指标:月度累计服务次数/时长/金额、有效业绩小时数(`total_hours - trashed_hours`)、绩效档位匹配、排名(考虑并列)
|
||||
- 新入职判断:入职日期在当月 1 日后即视为新入职,档位匹配按日均折算 30 天
|
||||
- 月度去重客户/台桌:从 DWD 直接去重,避免日度求和失真
|
||||
|
||||
### 1.3 助教客户统计(dws_assistant_customer_stats)
|
||||
|
||||
<!-- 待补充:助教服务客户维度的统计指标 -->
|
||||
|
||||
- 目标表:`dws.dws_assistant_customer_stats`
|
||||
- 数据来源:DWD 订单事实表、会员维度表
|
||||
- 数据来源:`dwd_assistant_service_log`、`dim_member`、`dim_assistant`
|
||||
- 粒度:门店 × 助教 × 会员
|
||||
- 核心指标:*(待定义)*
|
||||
- 核心指标:全量累计(首次/最近服务日期、累计次数/时长/金额)、6 个滚动窗口(7/10/15/30/60/90 天)、活跃度判定
|
||||
- 散客排除:`member_id` 为 0 或 None 不进入统计
|
||||
- HAVING 过滤:仅保留最近 90 天内有服务记录的助教-客户对
|
||||
|
||||
### 1.4 助教财务分析(dws_assistant_finance_analysis)
|
||||
|
||||
<!-- 待补充:助教维度的财务分析指标 -->
|
||||
|
||||
- 目标表:`dws.dws_assistant_finance_analysis`
|
||||
- 数据来源:DWD 支付/退款事实表
|
||||
- 数据来源:`dwd_assistant_service_log`、`cfg_skill_type`、`dws_assistant_salary_calc`、`dws_assistant_daily_detail`
|
||||
- 粒度:门店 × 助教 × 日期
|
||||
- 核心指标:*(待定义)*
|
||||
- 核心指标:日度收入(总/基础课/附加课/包厢课)、日均成本(`gross_salary / work_days`)、毛利润与毛利率
|
||||
- 依赖:`DWS_ASSISTANT_SALARY` 和 `DWS_ASSISTANT_DAILY` 的输出
|
||||
|
||||
### 1.5 助教订单流水贡献(dws_assistant_order_contribution)
|
||||
|
||||
- 目标表:`dws.dws_assistant_order_contribution`
|
||||
- 数据来源:`dwd_settlement_head`、`dwd_table_fee_log`、`dwd_assistant_service_log`
|
||||
- 粒度:门店 × 助教 × 日期
|
||||
- 核心指标(四项统计):
|
||||
- `order_gross_revenue`:订单总流水(台费 + 酒水食品 + 所有助教服务费)
|
||||
- `order_net_revenue`:订单净流水(总流水 - 所有助教服务分成)
|
||||
- `time_weighted_revenue`:时效贡献流水(按服务时长占比分摊)
|
||||
- `time_weighted_net_revenue`:时效净贡献(时效贡献 - 个人服务分成)
|
||||
- 超休/打赏课特殊处理:`course_type = BONUS` 的助教不参与订单级分摊
|
||||
- 依赖:`DWD_LOAD_FROM_ODS`
|
||||
|
||||
---
|
||||
|
||||
@@ -51,21 +63,17 @@
|
||||
|
||||
### 2.1 助教薪酬(dws_assistant_salary_calc)
|
||||
|
||||
<!-- 待补充:薪酬计算规则、提成比例、结算周期 -->
|
||||
|
||||
- 目标表:`dws.dws_assistant_salary_calc`
|
||||
- 数据来源:助教日报/月报、充值提成
|
||||
- 粒度:门店 × 助教 × 结算周期
|
||||
- 核心指标:*(待定义)*
|
||||
|
||||
### 2.2 充值提成(dws_assistant_recharge_commission)
|
||||
|
||||
<!-- 待补充:充值提成计算规则 -->
|
||||
|
||||
- 目标表:`dws.dws_assistant_recharge_commission`
|
||||
- 数据来源:DWD 充值事实表
|
||||
- 粒度:门店 × 助教 × 日期
|
||||
- 核心指标:*(待定义)*
|
||||
- 数据来源:`dws_assistant_monthly_summary`、`dws_assistant_recharge_commission`、`cfg_performance_tier`、`cfg_assistant_level_price`、`cfg_bonus_rules`
|
||||
- 粒度:门店 × 助教 × 结算月份
|
||||
- 核心公式:
|
||||
- 应发工资 = 课时收入 + 奖金合计
|
||||
- 基础课收入 = `base_hours × (base_course_price - base_deduction)`
|
||||
- 附加课收入 = `bonus_hours × bonus_course_price × (1 - bonus_deduction_ratio)`
|
||||
- 包厢课收入 = `room_hours × (room_course_price - base_deduction)`
|
||||
- 奖金合计 = 冲刺奖金 + Top3 排名奖金 + 充值提成 + 其他奖金
|
||||
- 等级定价:SCD2 按月份取历史生效值
|
||||
- 运行调度:默认仅月初前 5 天运行
|
||||
|
||||
---
|
||||
|
||||
@@ -73,57 +81,33 @@
|
||||
|
||||
### 3.1 财务日报汇总(dws_finance_daily_summary)
|
||||
|
||||
<!-- 待补充:每日财务汇总口径,含收入、支出、利润等 -->
|
||||
|
||||
- 目标表:`dws.dws_finance_daily_summary`
|
||||
- 数据来源:DWD 支付/退款/订单事实表
|
||||
- 数据来源:`dwd_settlement_head`、`dwd_groupbuy_redemption`、`dwd_recharge_order`、`dwd_member_balance_change`
|
||||
- 粒度:门店 × 日期
|
||||
- 核心指标:*(待定义)*
|
||||
- 核心指标:发生额(正价)、优惠合计(团购/会员/赠送卡/手动/抹零)、确认收入、现金流入/流出/净变动、卡消费、充值统计(首充/续充)、订单统计
|
||||
- 确认收入 = 发生额 - 优惠合计
|
||||
- 金额字段统一 `NUMERIC(12,2)`,货币单位人民币(CNY)
|
||||
|
||||
### 3.2 收入结构(dws_finance_income_structure)
|
||||
|
||||
<!-- 待补充:收入按来源/类型的分类口径 -->
|
||||
|
||||
- 目标表:`dws.dws_finance_income_structure`
|
||||
- 数据来源:DWD 支付事实表
|
||||
- 粒度:门店 × 日期 × 收入类型
|
||||
- 核心指标:*(待定义)*
|
||||
- 数据来源:`dwd_settlement_head`、`dwd_table_fee_log`、`dwd_assistant_service_log`、`dim_table`、`cfg_area_category`
|
||||
- 粒度:门店 × 日期 × 结构类型 × 分类代码
|
||||
- 两种分析维度:按收入类型(`INCOME_TYPE`:台费/商品/助教基础课/助教附加课)、按区域(`AREA`:通过 `cfg_area_category` 映射)
|
||||
|
||||
### 3.3 折扣明细(dws_finance_discount_detail)
|
||||
|
||||
<!-- 待补充:折扣/优惠统计口径 -->
|
||||
|
||||
- 目标表:`dws.dws_finance_discount_detail`
|
||||
- 数据来源:DWD 订单事实表
|
||||
- 粒度:门店 × 日期
|
||||
- 核心指标:*(待定义)*
|
||||
- 数据来源:`dwd_settlement_head`、`dwd_groupbuy_redemption`、`dwd_member_balance_change`
|
||||
- 粒度:门店 × 日期 × 折扣类型
|
||||
- 折扣类型:`GROUPBUY`/`VIP`/`ROUNDING`/`GIFT_CARD_TABLE`/`GIFT_CARD_DRINK`/`GIFT_CARD_COUPON`/`BIG_CUSTOMER`/`OTHER`
|
||||
|
||||
### 3.4 充值汇总(dws_finance_recharge_summary)
|
||||
|
||||
<!-- 待补充:充值金额、笔数等汇总口径 -->
|
||||
|
||||
- 目标表:`dws.dws_finance_recharge_summary`
|
||||
- 数据来源:DWD 充值事实表
|
||||
- 数据来源:`dwd_recharge_order`、`dim_member_card_account`
|
||||
- 粒度:门店 × 日期
|
||||
- 核心指标:*(待定义)*
|
||||
|
||||
### 3.5 支出汇总(dws_finance_expense_summary)
|
||||
|
||||
<!-- 待补充:支出分类汇总口径 -->
|
||||
|
||||
- 目标表:`dws.dws_finance_expense_summary`
|
||||
- 数据来源:DWD 支出事实表
|
||||
- 粒度:门店 × 日期
|
||||
- 核心指标:*(待定义)*
|
||||
|
||||
### 3.6 平台结算(dws_platform_settlement)
|
||||
|
||||
<!-- 待补充:第三方平台(团购等)结算口径 -->
|
||||
|
||||
- 目标表:`dws.dws_platform_settlement`
|
||||
- 数据来源:DWD 团购/支付事实表
|
||||
- 粒度:门店 × 日期
|
||||
- 核心指标:*(待定义)*
|
||||
- 核心指标:充值笔数/总额(现金+赠送)、首充/续充拆分、去重会员数、全店卡余额快照
|
||||
|
||||
---
|
||||
|
||||
@@ -131,21 +115,19 @@
|
||||
|
||||
### 4.1 会员消费汇总(dws_member_consumption_summary)
|
||||
|
||||
<!-- 待补充:会员消费行为汇总口径 -->
|
||||
|
||||
- 目标表:`dws.dws_member_consumption_summary`
|
||||
- 数据来源:DWD 订单/支付事实表、会员维度表
|
||||
- 数据来源:`dwd_settlement_head`、`dim_member`(SCD2)、`dim_member_card_account`(SCD2)
|
||||
- 粒度:门店 × 会员
|
||||
- 核心指标:*(待定义)*
|
||||
- 核心指标:全量累计消费、6 个滚动窗口(7/10/15/30/60/90 天)的到店次数与消费金额、卡余额(现金卡/赠送卡)、活跃度、客户分层
|
||||
- 客户分层规则:高价值(90 天 ≥ 3 次且 ≥ 1000 元)→ 中等(30 天内有消费)→ 低活跃(90 天内有但 30 天内无)→ 流失
|
||||
- 散客排除:`member_id` 为 0 或 None 不进入统计
|
||||
|
||||
### 4.2 会员到访明细(dws_member_visit_detail)
|
||||
|
||||
<!-- 待补充:会员到访频次、时段分布等口径 -->
|
||||
|
||||
- 目标表:`dws.dws_member_visit_detail`
|
||||
- 数据来源:DWD 订单事实表
|
||||
- 粒度:门店 × 会员 × 日期
|
||||
- 核心指标:*(待定义)*
|
||||
- 数据来源:`dwd_settlement_head`、`dwd_assistant_service_log`、`dwd_table_fee_log`、`dim_member`、`dim_table`、`cfg_area_category`
|
||||
- 粒度:门店 × 会员 × 结账单
|
||||
- 核心指标:消费金额拆分(台费/商品/助教)、支付方式拆分(现金/储值卡/赠送卡/团购券)、台桌使用时长、助教服务明细(JSON)
|
||||
|
||||
---
|
||||
|
||||
@@ -153,67 +135,84 @@
|
||||
|
||||
### 5.1 订单汇总宽表(dws_order_summary)
|
||||
|
||||
<!-- 待补充:订单维度的汇总宽表口径 -->
|
||||
|
||||
- 目标表:`dws.dws_order_summary`
|
||||
- 数据来源:DWD 订单/支付/退款事实表
|
||||
- 数据来源:`dwd_settlement_head`、`dwd_table_fee_log`、`dwd_assistant_service_log`、`dwd_store_goods_sale`、`dwd_groupbuy_redemption`、`dwd_refund`/`dwd_refund_ex`
|
||||
- 粒度:门店 × 结账单
|
||||
- 核心指标:*(待定义)*
|
||||
- 核心指标:费用明细(台费/助教/商品/团购)、优惠、金额汇总、支付方式、台账流水、有效消费、退款与净收入
|
||||
- 通过 6 个 CTE 多表合并,金额优先取明细表聚合值,回退到结账单头表汇总字段
|
||||
|
||||
---
|
||||
|
||||
## 6. 自定义指数算法
|
||||
## 6. 库存汇总
|
||||
|
||||
指数算法的详细计算流程、参数与归一化方法请参阅 [index_algorithm_cn.md](index_algorithm_cn.md)。
|
||||
### 6.1 库存日度汇总(dws_goods_stock_daily_summary)
|
||||
|
||||
- 目标表:`dws.dws_goods_stock_daily_summary`
|
||||
- 数据来源:`dwd_goods_stock_summary`
|
||||
- 粒度:门店 × 日期 × 商品
|
||||
- 更新策略:upsert(ON CONFLICT DO UPDATE)
|
||||
- 核心逻辑:按 `fetched_at` 日期分组,数值指标取 SUM,期初/期末取当日首/末条记录
|
||||
|
||||
### 6.2 库存周度汇总(dws_goods_stock_weekly_summary)
|
||||
|
||||
- 目标表:`dws.dws_goods_stock_weekly_summary`
|
||||
- 数据来源:`dwd_goods_stock_summary`
|
||||
- 粒度:门店 × ISO 周 × 商品
|
||||
- 更新策略:upsert(ON CONFLICT DO UPDATE)
|
||||
- 核心逻辑:按 ISO 周分组,`stat_date` = 该周周一
|
||||
|
||||
### 6.3 库存月度汇总(dws_goods_stock_monthly_summary)
|
||||
|
||||
- 目标表:`dws.dws_goods_stock_monthly_summary`
|
||||
- 数据来源:`dwd_goods_stock_summary`
|
||||
- 粒度:门店 × 自然月 × 商品
|
||||
- 更新策略:upsert(ON CONFLICT DO UPDATE)
|
||||
- 核心逻辑:按自然月分组,`stat_date` = 该月第一天
|
||||
|
||||
---
|
||||
|
||||
## 7. 自定义指数算法
|
||||
|
||||
指数算法的详细计算流程、参数与归一化方法请参阅 [index_tasks.md](../etl_tasks/index_tasks.md)。
|
||||
|
||||
以下为各指数对应的汇总表概览:
|
||||
|
||||
### 6.1 会员召回指数 — WBI(dws_member_recall_index)
|
||||
|
||||
<!-- 待补充:WBI 指数的业务口径与触发条件 -->
|
||||
|
||||
- 目标表:`dws.dws_member_recall_index`
|
||||
- 粒度:门店 × 会员
|
||||
|
||||
### 6.2 新客转化指数 — NCI(dws_member_newconv_index)
|
||||
|
||||
<!-- 待补充:NCI 指数的业务口径与评分规则 -->
|
||||
|
||||
- 目标表:`dws.dws_member_newconv_index`
|
||||
- 粒度:门店 × 会员
|
||||
|
||||
### 6.3 关系指数 — RS(dws_member_assistant_relation_index)
|
||||
|
||||
<!-- 待补充:RS 指数的业务口径与亲密度计算 -->
|
||||
|
||||
- 目标表:`dws.dws_member_assistant_relation_index`
|
||||
- 粒度:门店 × 会员 × 助教
|
||||
|
||||
### 6.4 助教-会员亲密度(dws_member_assistant_intimacy)
|
||||
|
||||
<!-- 待补充:亲密度评分口径 -->
|
||||
|
||||
- 目标表:`dws.dws_member_assistant_intimacy`
|
||||
- 粒度:门店 × 会员 × 助教
|
||||
|
||||
### 6.5 回流指数 — OS(dws_member_winback_index)
|
||||
|
||||
<!-- 待补充:OS 指数的业务口径与回流判定规则 -->
|
||||
### 7.1 回流指数 — WBI(dws_member_winback_index)
|
||||
|
||||
- 目标表:`dws.dws_member_winback_index`
|
||||
- 粒度:门店 × 会员
|
||||
- 任务代码:`DWS_WINBACK_INDEX`
|
||||
- 依赖:`DWS_MEMBER_VISIT`、`DWS_MEMBER_CONSUMPTION`
|
||||
|
||||
### 6.6 人工台账 — ML(dws_ml_manual_order_source / dws_ml_manual_order_alloc)
|
||||
### 7.2 新客转化指数 — NCI(dws_member_newconv_index)
|
||||
|
||||
<!-- 待补充:ML 人工台账的业务口径与分配规则 -->
|
||||
- 目标表:`dws.dws_member_newconv_index`
|
||||
- 粒度:门店 × 会员
|
||||
- 任务代码:`DWS_NEWCONV_INDEX`
|
||||
- 依赖:`DWS_MEMBER_VISIT`、`DWS_MEMBER_CONSUMPTION`
|
||||
|
||||
### 7.3 关系指数 — RS(dws_member_assistant_relation_index)
|
||||
|
||||
- 目标表:`dws.dws_relation_index`
|
||||
- 粒度:门店 × 会员 × 助教
|
||||
- 任务代码:`DWS_RELATION_INDEX`
|
||||
- 依赖:`DWS_ASSISTANT_DAILY`
|
||||
|
||||
### 7.4 消费力指数 — SPI(dws_member_spending_power_index)
|
||||
|
||||
- 目标表:`dws.dws_member_spending_power_index`
|
||||
- 粒度:门店 × 会员
|
||||
- 任务代码:`DWS_SPENDING_POWER_INDEX`
|
||||
- 依赖:`DWS_MEMBER_CONSUMPTION`
|
||||
|
||||
### 7.5 人工台账 — ML(dws_ml_manual_order_source / dws_ml_manual_order_alloc)
|
||||
|
||||
- 宽表:`dws.dws_ml_manual_order_source`
|
||||
- 窄表:`dws.dws_ml_manual_order_alloc`
|
||||
- 粒度:门店 × 订单 × 助教
|
||||
- 任务代码:`DWS_ML_MANUAL_IMPORT`
|
||||
|
||||
### 6.7 指数百分位历史(dws_index_percentile_history)
|
||||
|
||||
<!-- 待补充:指数百分位归一化的历史快照口径 -->
|
||||
### 7.6 指数百分位历史(dws_index_percentile_history)
|
||||
|
||||
- 目标表:`dws.dws_index_percentile_history`
|
||||
- 粒度:门店 × 指数类型 × 日期
|
||||
|
||||
@@ -3,8 +3,6 @@
|
||||
本文档定义 `dwd` 模式下维度表的 SCD2(Slowly Changing Dimension Type 2)处理策略、
|
||||
生效区间管理和版本控制规则。
|
||||
|
||||
> **状态**:骨架文档,各维度表的跟踪字段与变更触发条件待补充。
|
||||
|
||||
---
|
||||
|
||||
## 1. 概述
|
||||
@@ -17,9 +15,18 @@ SCD2 通过保留维度记录的历史版本来追踪属性变化。当被跟踪
|
||||
|
||||
### 1.2 实现模块
|
||||
|
||||
- 处理器:`scd/scd2_handler.py` — `SCD2Handler` 类
|
||||
- 核心方法:`upsert(table_name, natural_key, tracked_fields, record, effective_date)`
|
||||
- 返回值:`INSERT`(新记录)、`UPDATE`(属性变更)、`UNCHANGED`(无变化)
|
||||
- 处理器:`tasks/dwd/dwd_load_task.py` — `_merge_dim_scd2()` 方法
|
||||
- 变更检测:`_is_row_changed()` — 比较所有非 SCD2 控制列,任一列值不同即视为变更
|
||||
- 批量关闭:`_close_current_dim_bulk()` — 批量设置旧版本的 `scd2_end_time` 和 `scd2_is_current = 0`
|
||||
- 批量插入:`_insert_dim_rows_bulk()` — 批量插入新版本行
|
||||
|
||||
### 1.3 变更检测逻辑
|
||||
|
||||
`_is_row_changed(current, incoming, dwd_cols)` 遍历目标表的所有列(排除 SCD2 控制列),逐列比较当前版本与新数据。比较时会进行类型归一化处理:
|
||||
- 空值归一化:`None`、空字符串、`"null"` 视为等价
|
||||
- 数值归一化:字符串形式的数字与 `Decimal`/`int` 比较前先转换
|
||||
- 布尔归一化:`"true"`/`"1"`/`"yes"` 等与 `True` 视为等价
|
||||
- 日期归一化:字符串形式的日期与 `datetime` 比较前先解析
|
||||
|
||||
---
|
||||
|
||||
@@ -38,103 +45,119 @@ SCD2 通过保留维度记录的历史版本来追踪属性变化。当被跟踪
|
||||
|
||||
- 主键:`(natural_key, scd2_start_time)` — 同一自然键的不同版本通过生效时间区分
|
||||
- 唯一索引:`WHERE scd2_is_current = 1` — 保证每个自然键只有一条当前记录
|
||||
- 排他约束(GiST):`tstzrange(scd2_start_time, scd2_end_time)` — 防止同一自然键的版本时间段重叠
|
||||
|
||||
---
|
||||
|
||||
## 3. 处理流程
|
||||
|
||||
```
|
||||
收到维度记录
|
||||
_merge_dim_scd2(cur, dwd_table, ods_table, dwd_cols, ods_cols, now)
|
||||
│
|
||||
▼
|
||||
按 natural_key 查找 valid_to IS NULL 的当前记录
|
||||
├── 1. 从 ODS 取最新有效版本(DISTINCT ON + is_delete IS DISTINCT FROM 1)
|
||||
│
|
||||
├── 不存在 → INSERT 新记录(is_current=1, valid_from=now)
|
||||
├── 2. 从 DWD 取当前版本(scd2_is_current = 1)
|
||||
│
|
||||
└── 存在 → 比较 tracked_fields
|
||||
│
|
||||
├── 无变化 → UNCHANGED(跳过)
|
||||
│
|
||||
└── 有变化 → UPDATE 旧记录(valid_to=now, is_current=0)
|
||||
INSERT 新记录(valid_from=now, is_current=1)
|
||||
├── 3. 按自然键逐条比较:
|
||||
│ │
|
||||
│ ├── DWD 中不存在 → 收集为待插入(INSERT)
|
||||
│ │
|
||||
│ ├── 存在但 _is_row_changed() 返回 True → 收集为待更新
|
||||
│ │ ├── 关闭旧版本(scd2_end_time = now, scd2_is_current = 0)
|
||||
│ │ └── 插入新版本(scd2_start_time = now, scd2_is_current = 1, version + 1)
|
||||
│ │
|
||||
│ └── 存在且无变化 → 跳过(UNCHANGED)
|
||||
│
|
||||
├── 4. _close_current_dim_bulk() — 批量关闭旧版本
|
||||
│
|
||||
└── 5. _insert_dim_rows_bulk() — 批量插入新版本
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## 4. 维度表 SCD2 配置
|
||||
|
||||
> 跟踪字段 = 表中除自然键和 SCD2 控制列(`scd2_start_time`/`scd2_end_time`/`scd2_is_current`/`scd2_version`)之外的所有列。任一跟踪字段值变化即触发新版本。
|
||||
|
||||
### 4.1 门店维度(dim_site / dim_site_ex)
|
||||
|
||||
<!-- 待补充:自然键、跟踪字段列表 -->
|
||||
|
||||
- Schema:`dwd`
|
||||
- ODS 来源:`ods.table_fee_transactions`(从台费流水中的 `siteProfile` 快照提取)
|
||||
- 自然键:`site_id`
|
||||
- 跟踪字段:*(待定义)*
|
||||
- 变更触发场景:*(待补充)*
|
||||
- dim_site 跟踪字段:`org_id`、`tenant_id`、`shop_name`、`site_label`、`full_address`、`address`、`longitude`、`latitude`、`tenant_site_region_id`、`business_tel`、`site_type`、`shop_status`
|
||||
- dim_site_ex 跟踪字段:`avatar`、`address`、`longitude`、`latitude`、`tenant_site_region_id`、`auto_light`、`light_status`、`light_type`、`light_token`、`site_type`、`site_label`、`attendance_enabled`、`attendance_distance`、`customer_service_qrcode`、`customer_service_wechat`、`fixed_pay_qrcode`、`prod_env`、`shop_status`、`create_time`、`update_time`
|
||||
- 变更触发场景:门店名称/地址/状态/经纬度等基础信息变更
|
||||
|
||||
### 4.2 台桌维度(dim_table / dim_table_ex)
|
||||
|
||||
<!-- 待补充:自然键、跟踪字段列表 -->
|
||||
|
||||
- Schema:`dwd`
|
||||
- ODS 来源:`ods.site_tables_master`
|
||||
- 自然键:`table_id`
|
||||
- 跟踪字段:*(待定义)*
|
||||
- 变更触发场景:*(待补充)*
|
||||
- dim_table 跟踪字段:`site_id`、`table_name`、`site_table_area_id`、`site_table_area_name`、`tenant_table_area_id`、`table_price`、`order_id`
|
||||
- dim_table_ex 跟踪字段:`show_status`、`is_online_reservation`、`table_cloth_use_time`、`table_cloth_use_cycle`、`table_status`、`create_time`、`light_status`、`tablestatusname`、`sitename`、`applet_qr_code_url`、`audit_status`、`charge_free`、`delay_lights_time`、`is_rest_area`、`only_allow_groupon`、`order_delay_time`、`self_table`、`temporary_light_second`、`virtual_table`
|
||||
- 变更触发场景:台桌名称/区域/价格/状态变更
|
||||
|
||||
### 4.3 助教维度(dim_assistant / dim_assistant_ex)
|
||||
|
||||
<!-- 待补充:自然键、跟踪字段列表 -->
|
||||
|
||||
- Schema:`dwd`
|
||||
- ODS 来源:`ods.assistant_accounts_master`
|
||||
- 自然键:`assistant_id`
|
||||
- 跟踪字段:*(待定义)*
|
||||
- 变更触发场景:*(待补充)*
|
||||
- dim_assistant 跟踪字段:`user_id`、`assistant_no`、`real_name`、`nickname`、`mobile`、`tenant_id`、`site_id`、`team_id`、`team_name`、`level`、`entry_time`、`resign_time`、`leave_status`、`assistant_status`
|
||||
- dim_assistant_ex 跟踪字段:`gender`、`birth_date`、`avatar`、`introduce`、`video_introduction_url`、`height`、`weight`、`shop_name`、`group_id`、`group_name`、`person_org_id`、`staff_id`、`staff_profile_id`、`assistant_grade`、`sum_grade`、`get_grade_times`、`charge_way`、`allow_cx`、`is_guaranteed`、`salary_grant_enabled`、`entry_type`、`entry_sign_status`、`resign_sign_status`、`work_status`、`show_status`、`show_sort`、`online_status`、`is_delete`、`criticism_status`、`create_time`、`update_time`、`start_time`、`end_time`、`last_table_id`、`last_table_name`、`last_update_name`、`order_trade_no`、`ding_talk_synced`、`site_light_cfg_id`、`light_equipment_id`、`light_status`、`is_team_leader`、`serial_number`、`system_role_id`、`job_num`、`cx_unit_price`、`pd_unit_price`
|
||||
- 变更触发场景:助教等级/团队/状态/入职离职/评分等变更
|
||||
|
||||
### 4.4 会员维度(dim_member / dim_member_ex)
|
||||
|
||||
<!-- 待补充:自然键、跟踪字段列表 -->
|
||||
|
||||
- Schema:`dwd`
|
||||
- ODS 来源:`ods.member_profiles`
|
||||
- 自然键:`member_id`
|
||||
- 跟踪字段:*(待定义)*
|
||||
- 变更触发场景:*(待补充)*
|
||||
- dim_member 跟踪字段:`system_member_id`、`tenant_id`、`register_site_id`、`mobile`、`nickname`、`member_card_grade_code`、`member_card_grade_name`、`create_time`、`update_time`、`pay_money_sum`、`recharge_money_sum`、`birthday`
|
||||
- dim_member_ex 跟踪字段:`referrer_member_id`、`point`、`register_site_name`、`growth_value`、`user_status`、`status`、`person_tenant_org_id`、`person_tenant_org_name`、`register_source`
|
||||
- 变更触发场景:会员昵称/手机号/卡等级/累计消费充值/状态等变更
|
||||
|
||||
### 4.5 会员卡账户维度(dim_member_card_account / dim_member_card_account_ex)
|
||||
|
||||
<!-- 待补充:自然键、跟踪字段列表 -->
|
||||
|
||||
- Schema:`dwd`
|
||||
- ODS 来源:`ods.member_stored_value_cards`
|
||||
- 自然键:`member_card_id`
|
||||
- 跟踪字段:*(待定义)*
|
||||
- 变更触发场景:*(待补充)*
|
||||
- dim_member_card_account 跟踪字段:`tenant_id`、`register_site_id`、`tenant_member_id`、`system_member_id`、`card_type_id`、`member_card_grade_code`、`member_card_grade_code_name`、`member_card_type_name`、`member_name`、`member_mobile`、`balance`、`start_time`、`end_time`、`last_consume_time`、`status`、`is_delete`、`principal_balance`、`member_grade`
|
||||
- dim_member_card_account_ex 跟踪字段:(60+ 列,含各类折扣比例、抵扣开关等,详见 DDL)
|
||||
- 变更触发场景:卡余额/状态/折扣配置/有效期等变更
|
||||
|
||||
### 4.6 商品维度(dim_tenant_goods / dim_tenant_goods_ex / dim_store_goods / dim_store_goods_ex)
|
||||
### 4.6 商品维度
|
||||
|
||||
<!-- 待补充:自然键、跟踪字段列表 -->
|
||||
#### 租户商品(dim_tenant_goods / dim_tenant_goods_ex)
|
||||
|
||||
- Schema:`dwd`
|
||||
- 自然键:`tenant_goods_id` / `site_goods_id`
|
||||
- 跟踪字段:*(待定义)*
|
||||
- 变更触发场景:*(待补充)*
|
||||
- ODS 来源:`ods.tenant_goods_master`
|
||||
- 自然键:`tenant_goods_id`
|
||||
- dim_tenant_goods 跟踪字段:`tenant_id`、`supplier_id`、`category_name`、`goods_category_id`、`goods_second_category_id`、`goods_name`、`goods_number`、`unit`、`market_price`、`goods_state`、`create_time`、`update_time`、`is_delete`、`not_sale`
|
||||
|
||||
#### 门店商品(dim_store_goods / dim_store_goods_ex)
|
||||
|
||||
- ODS 来源:`ods.store_goods_master`
|
||||
- 自然键:`site_goods_id`
|
||||
- dim_store_goods 跟踪字段:`tenant_id`、`site_id`、`tenant_goods_id`、`goods_name`、`goods_category_id`、`goods_second_category_id`、`category_level1_name`、`category_level2_name`、`batch_stock_qty`、`sale_qty`、`total_sales_qty`、`sale_price`、`created_at`、`updated_at`、`avg_monthly_sales`、`goods_state`、`enable_status`、`send_state`、`is_delete`、`commodity_code`、`not_sale`
|
||||
|
||||
### 4.7 商品分类维度(dim_goods_category)
|
||||
|
||||
<!-- 待补充:自然键、跟踪字段列表 -->
|
||||
|
||||
- Schema:`dwd`
|
||||
- ODS 来源:`ods.stock_goods_category_tree`
|
||||
- 自然键:`category_id`
|
||||
- 跟踪字段:*(待定义)*
|
||||
- 变更触发场景:*(待补充)*
|
||||
- 跟踪字段:`tenant_id`、`category_name`、`alias_name`、`parent_category_id`、`business_name`、`tenant_goods_business_id`、`category_level`、`is_leaf`、`open_salesman`、`sort_order`、`is_warehousing`
|
||||
- 变更触发场景:分类名称/层级/排序/启用状态变更
|
||||
|
||||
### 4.8 团购套餐维度(dim_groupbuy_package / dim_groupbuy_package_ex)
|
||||
|
||||
<!-- 待补充:自然键、跟踪字段列表 -->
|
||||
|
||||
- Schema:`dwd`
|
||||
- ODS 来源:`ods.group_buy_packages`
|
||||
- 自然键:`groupbuy_package_id`
|
||||
- 跟踪字段:*(待定义)*
|
||||
- 变更触发场景:*(待补充)*
|
||||
- dim_groupbuy_package 跟踪字段:`tenant_id`、`site_id`、`package_name`、`package_template_id`、`selling_price`、`coupon_face_value`、`duration_seconds`、`start_time`、`end_time`、`table_area_name`、`is_enabled`、`is_delete`、`create_time`、`tenant_table_area_id_list`、`card_type_ids`、`sort`、`is_first_limit`
|
||||
- 变更触发场景:套餐名称/价格/面值/有效期/启用状态变更
|
||||
|
||||
### 4.9 员工维度(dim_staff / dim_staff_ex)
|
||||
|
||||
- ODS 来源:`ods.staff_info_master`
|
||||
- 自然键:`staff_id`
|
||||
- dim_staff 跟踪字段:`staff_name`、`alias_name`、`mobile`、`gender`、`job`、`tenant_id`、`site_id`、`system_role_id`、`staff_identity`、`status`、`leave_status`、`entry_time`、`resign_time`、`is_delete`
|
||||
- dim_staff_ex 跟踪字段:`avatar`、`job_num`、`account_status`、`rank_id`、`rank_name`、`new_rank_id`、`new_staff_identity`、`is_reserve`、`shop_name`、`site_label`、`tenant_org_id`、`system_user_id`、`cashier_point_id`、`cashier_point_name`、`group_id`、`group_name`、`staff_profile_id`、`auth_code`、`auth_code_create`、`ding_talk_synced`、`salary_grant_enabled`、`entry_type`、`entry_sign_status`、`resign_sign_status`、`criticism_status`、`create_time`、`user_roles`
|
||||
- 变更触发场景:员工姓名/岗位/角色/状态/入职离职等变更
|
||||
|
||||
---
|
||||
|
||||
@@ -167,9 +190,10 @@ ORDER BY scd2_start_time;
|
||||
|
||||
## 6. 注意事项
|
||||
|
||||
- **时区**:`scd2_start_time` / `scd2_end_time` 使用 `TIMESTAMPTZ`,统一以服务器时区存储
|
||||
- **时区**:`scd2_start_time` / `scd2_end_time` 使用 `TIMESTAMPTZ`,统一以 `Asia/Shanghai` 时区存储
|
||||
- **并发安全**:当前实现在单次 ETL 运行内串行处理,未做行级锁;并发写入需额外保护
|
||||
- **删除策略**:维度记录不做物理删除,仅通过关闭版本(`scd2_is_current = 0`)标记失效
|
||||
- **ODS 来源过滤**:从 ODS 取数时统一使用 `DISTINCT ON (natural_key) ... WHERE is_delete IS DISTINCT FROM 1 ORDER BY natural_key, fetched_at DESC`,确保取最新有效版本
|
||||
|
||||
---
|
||||
|
||||
|
||||
@@ -51,10 +51,10 @@ graph LR
|
||||
| 文档 | 说明 |
|
||||
|------|------|
|
||||
| [BaseTask 公共机制](base_task_mechanism.md) | 任务基类模板方法、TaskContext、时间窗口、注册表、Flow 执行 |
|
||||
| [ODS 层任务](ods_tasks.md) | 22 个通用 ODS 任务的架构、配置结构、API 端点、目标表 |
|
||||
| [ODS 层任务](ods_tasks.md) | 23 个通用 ODS 任务的架构、配置结构、API 端点、目标表 |
|
||||
| [DWD 层任务](dwd_tasks.md) | DWD_LOAD_FROM_ODS 核心装载、SCD2 处理、质量校验 |
|
||||
| [DWS 层任务](dws_tasks.md) | 助教业绩、会员分析、财务统计、运维任务共 13 个 DWS 任务 |
|
||||
| [INDEX 层任务](index_tasks.md) | WBI/NCI/RS 指数算法 + ML 手动台账导入 |
|
||||
| [DWS 层任务](dws_tasks.md) | 助教业绩、会员分析、财务统计、库存汇总、运维任务共 17 个 DWS 任务 |
|
||||
| [INDEX 层任务](index_tasks.md) | WBI/NCI/RS/SPI 指数算法 + ML 手动台账导入 |
|
||||
| [工具类任务](utility_tasks.md) | Schema 初始化、手动入库、归档、截止检查、完整性校验 |
|
||||
|
||||
---
|
||||
@@ -89,6 +89,7 @@ graph LR
|
||||
| `ODS_STORE_GOODS_SALES` | `OdsGoodsLedgerTask` | `ods.store_goods_sales_records` | 门店商品销售流水 | [查看](ods_tasks.md) |
|
||||
| `ODS_TENANT_GOODS` | `OdsTenantGoodsTask` | `ods.tenant_goods_master` | 租户商品档案 | [查看](ods_tasks.md) |
|
||||
| `ODS_SETTLEMENT_RECORDS` | `OdsOrderSettleTask` | `ods.settlement_records` | 结账记录 | [查看](ods_tasks.md) |
|
||||
| `ODS_STAFF_INFO` | `OdsStaffInfoTask` | `ods.staff_info_master` | 员工档案(含在职/离职) | [查看](ods_tasks.md) |
|
||||
|
||||
### DWD 层(明细数据)
|
||||
|
||||
@@ -108,6 +109,7 @@ graph LR
|
||||
| `DWS_ASSISTANT_CUSTOMER` | `AssistantCustomerTask` | `dws_assistant_customer_stats` | 日期+助教+会员 | [查看](dws_tasks.md) |
|
||||
| `DWS_ASSISTANT_SALARY` | `AssistantSalaryTask` | `dws_assistant_salary_calc` | 月份+助教 | [查看](dws_tasks.md) |
|
||||
| `DWS_ASSISTANT_FINANCE` | `AssistantFinanceTask` | `dws_assistant_finance_analysis` | 日期+助教 | [查看](dws_tasks.md) |
|
||||
| `DWS_ASSISTANT_ORDER_CONTRIBUTION` | `AssistantOrderContributionTask` | `dws_assistant_order_contribution` | 日期+助教 | [查看](dws_tasks.md) |
|
||||
|
||||
#### 会员分析域
|
||||
|
||||
@@ -125,6 +127,14 @@ graph LR
|
||||
| `DWS_FINANCE_INCOME_STRUCTURE` | `FinanceIncomeStructureTask` | `dws_finance_income_structure` | 日期+收入类型 | [查看](dws_tasks.md) |
|
||||
| `DWS_FINANCE_DISCOUNT_DETAIL` | `FinanceDiscountDetailTask` | `dws_finance_discount_detail` | 日期+折扣类型 | [查看](dws_tasks.md) |
|
||||
|
||||
#### 库存汇总域
|
||||
|
||||
| 任务代码 | Python 类 | 目标表 | 粒度 | 详情 |
|
||||
|----------|-----------|--------|------|------|
|
||||
| `DWS_GOODS_STOCK_DAILY` | `GoodsStockDailyTask` | `dws_goods_stock_daily_summary` | 日期+商品 | [查看](dws_tasks.md) |
|
||||
| `DWS_GOODS_STOCK_WEEKLY` | `GoodsStockWeeklyTask` | `dws_goods_stock_weekly_summary` | ISO周+商品 | [查看](dws_tasks.md) |
|
||||
| `DWS_GOODS_STOCK_MONTHLY` | `GoodsStockMonthlyTask` | `dws_goods_stock_monthly_summary` | 月份+商品 | [查看](dws_tasks.md) |
|
||||
|
||||
#### 运维任务
|
||||
|
||||
| 任务代码 | Python 类 | 简要说明 | 详情 |
|
||||
@@ -140,6 +150,7 @@ graph LR
|
||||
| `DWS_NEWCONV_INDEX` | `NewconvIndexTask` | `dws_member_newconv_index` | NCI(新客转化指数) | [查看](index_tasks.md) |
|
||||
| `DWS_RELATION_INDEX` | `RelationIndexTask` | `dws_relation_index` | RS(关系指数) | [查看](index_tasks.md) |
|
||||
| `DWS_ML_MANUAL_IMPORT` | `MlManualImportTask` | `dws_ml_manual_order_source` / `dws_ml_manual_order_alloc` | ML(手动台账导入) | [查看](index_tasks.md) |
|
||||
| `DWS_SPENDING_POWER_INDEX` | `SpendingPowerIndexTask` | `dws_member_spending_power_index` | SPI(消费力指数) | [查看](index_tasks.md) |
|
||||
|
||||
### 工具类 / 校验类
|
||||
|
||||
@@ -353,4 +364,4 @@ python -m cli.main --tasks DATA_INTEGRITY_CHECK
|
||||
|
||||
---
|
||||
|
||||
> 最后更新日期:2026-02-18
|
||||
> 最后更新日期:2026-02-26
|
||||
|
||||
@@ -286,8 +286,8 @@ default_registry.register("DWS_ASSISTANT_FINANCE", AssistantFinanceTask, layer="
|
||||
|----|------|------|
|
||||
| ODS | 23 | 通用 ODS 任务(由 `ODS_TASK_CLASSES` 动态生成),全部默认 `skip_unchanged=True` |
|
||||
| DWD | 2 | 含核心装载任务 `DWD_LOAD_FROM_ODS` 和质量检查 |
|
||||
| DWS | 13 | 助教业绩、会员分析、财务统计、统一维护任务(原 3 个 MV 刷新/清理任务已合并为 DWS_MAINTENANCE) |
|
||||
| INDEX | 4 | 回流指数、新客转化指数、关系指数、手动台账导入 |
|
||||
| DWS | 17 | 助教业绩(含订单流水贡献)、会员分析、财务统计、库存汇总、运维维护(原 3 个 MV 刷新/清理任务已合并为 DWS_MAINTENANCE) |
|
||||
| INDEX | 5 | 回流指数、新客转化指数、关系指数、消费力指数、手动台账导入 |
|
||||
| 工具类 | 7 | Schema 初始化、手动入库、归档、校验等 |
|
||||
| 校验类 | 1 | 数据完整性校验 |
|
||||
|
||||
|
||||
@@ -8,9 +8,9 @@
|
||||
|
||||
## 概述
|
||||
|
||||
DWS 层共有 13 个已注册任务,按业务域分为四组:
|
||||
DWS 层共有 17 个已注册任务(含 DWS_MAINTENANCE),按业务域分为五组:
|
||||
|
||||
### 助教业绩域(5 个)
|
||||
### 助教业绩域(6 个)
|
||||
|
||||
| 任务代码 | Python 类 | 目标表 | 粒度 | 更新策略 |
|
||||
|----------|-----------|--------|------|----------|
|
||||
@@ -19,6 +19,7 @@ DWS 层共有 13 个已注册任务,按业务域分为四组:
|
||||
| `DWS_ASSISTANT_CUSTOMER` | `AssistantCustomerTask` | `dws_assistant_customer_stats` | 日期+助教+会员 | delete-before-insert |
|
||||
| `DWS_ASSISTANT_SALARY` | `AssistantSalaryTask` | `dws_assistant_salary_calc` | 月份+助教 | delete-before-insert |
|
||||
| `DWS_ASSISTANT_FINANCE` | `AssistantFinanceTask` | `dws_assistant_finance_analysis` | 日期+助教 | delete-before-insert |
|
||||
| `DWS_ASSISTANT_ORDER_CONTRIBUTION` | `AssistantOrderContributionTask` | `dws_assistant_order_contribution` | 日期+助教 | delete-before-insert |
|
||||
|
||||
### 会员分析域(2 个)
|
||||
|
||||
@@ -36,6 +37,14 @@ DWS 层共有 13 个已注册任务,按业务域分为四组:
|
||||
| `DWS_FINANCE_INCOME_STRUCTURE` | `FinanceIncomeStructureTask` | `dws_finance_income_structure` | 日期+收入类型 | delete-before-insert |
|
||||
| `DWS_FINANCE_DISCOUNT_DETAIL` | `FinanceDiscountDetailTask` | `dws_finance_discount_detail` | 日期+折扣类型 | delete-before-insert |
|
||||
|
||||
### 库存汇总域(3 个)
|
||||
|
||||
| 任务代码 | Python 类 | 目标表 | 粒度 | 更新策略 |
|
||||
|----------|-----------|--------|------|----------|
|
||||
| `DWS_GOODS_STOCK_DAILY` | `GoodsStockDailyTask` | `dws_goods_stock_daily_summary` | 日期+商品 | upsert |
|
||||
| `DWS_GOODS_STOCK_WEEKLY` | `GoodsStockWeeklyTask` | `dws_goods_stock_weekly_summary` | ISO周+商品 | upsert |
|
||||
| `DWS_GOODS_STOCK_MONTHLY` | `GoodsStockMonthlyTask` | `dws_goods_stock_monthly_summary` | 月份+商品 | upsert |
|
||||
|
||||
### 运维任务(2 个)
|
||||
|
||||
| 任务代码 | Python 类 | 继承 | 说明 | 更新策略 |
|
||||
@@ -377,6 +386,51 @@ dwd_assistant_service_log ────► DWS_ASSISTANT_CUSTOMER(客户关系
|
||||
|
||||
---
|
||||
|
||||
### DWS_ASSISTANT_ORDER_CONTRIBUTION — 助教订单流水四项统计
|
||||
|
||||
| 属性 | 值 |
|
||||
|------|-----|
|
||||
| 任务代码 | `DWS_ASSISTANT_ORDER_CONTRIBUTION` |
|
||||
| Python 类 | `AssistantOrderContributionTask`(`tasks/dws/assistant_order_contribution_task.py`) |
|
||||
| 目标表 | `dws.dws_assistant_order_contribution` |
|
||||
| 主键 | `site_id`, `assistant_id`, `stat_date` |
|
||||
| 粒度 | 日期 + 助教 |
|
||||
| 更新策略 | delete-before-insert(按日期窗口) |
|
||||
| 更新频率 | 每日更新 |
|
||||
| 依赖 | `DWD_LOAD_FROM_ODS` |
|
||||
|
||||
#### 数据来源
|
||||
|
||||
| 来源表 | Schema | 用途 |
|
||||
|--------|--------|------|
|
||||
| `dwd_settlement_head` | `dwd` | 结算主表(订单级信息) |
|
||||
| `dwd_table_fee_log` | `dwd` | 台费明细(台桌使用时长、台费金额) |
|
||||
| `dwd_assistant_service_log` | `dwd` | 助教服务记录(服务时长、流水、分成) |
|
||||
|
||||
#### 聚合维度与输出字段
|
||||
|
||||
按 `(site_id, assistant_id, stat_date)` 聚合(与目标表主键一致),输出以下字段:
|
||||
|
||||
| 字段分组 | 字段 | 说明 |
|
||||
|----------|------|------|
|
||||
| 标识 | `site_id`, `tenant_id`, `assistant_id`, `assistant_nickname`, `stat_date` | 门店、助教、日期 |
|
||||
| 四项统计 | `order_gross_revenue` | 订单总流水:台费 + 酒水食品 + 所有助教服务费 |
|
||||
| | `order_net_revenue` | 订单净流水:订单总流水 - 所有助教服务分成 |
|
||||
| | `time_weighted_revenue` | 时效贡献流水:按服务时长折算的个人贡献 |
|
||||
| | `time_weighted_net_revenue` | 时效净贡献:时效贡献流水 - 个人服务分成 |
|
||||
| 辅助 | `order_count`, `total_service_seconds` | 参与订单数、总服务时长秒数 |
|
||||
|
||||
#### 核心业务逻辑
|
||||
|
||||
1. **订单总流水(order_gross_revenue)**:助教参与订单的全部流水(台费 + 酒水食品 + 所有助教服务费),每个参与助教获得相同值
|
||||
2. **订单净流水(order_net_revenue)**:订单总流水 - 该订单所有助教的服务分成总额,每个参与助教获得相同值
|
||||
3. **时效贡献流水(time_weighted_revenue)**:台费按助教在各台桌的服务时长占比分摊 + 个人服务费 + 酒水食品按总时长比例均分
|
||||
4. **时效净贡献(time_weighted_net_revenue)**:时效贡献流水 - 该助教个人的服务分成
|
||||
5. **超休/打赏课特殊处理**:`course_type = BONUS` 的助教不参与订单级分摊,四项统计均等于个人服务流水和分成
|
||||
6. **台费分摊公式**:`billable_seconds = MAX(SUM(助教服务时长), 台桌使用时长)`,各助教按 `service_seconds / billable_seconds` 比例分摊
|
||||
|
||||
---
|
||||
|
||||
### DWS_ASSISTANT_DAILY — 助教日度业绩明细
|
||||
|
||||
| 属性 | 值 |
|
||||
|
||||
@@ -8,16 +8,17 @@
|
||||
|
||||
## 概述
|
||||
|
||||
INDEX 层共有 4 个已注册任务:
|
||||
INDEX 层共有 5 个已注册任务:
|
||||
|
||||
| 任务代码 | Python 类 | 目标表 | 指数类型 | 更新策略 |
|
||||
|----------|-----------|--------|----------|----------|
|
||||
| `DWS_WINBACK_INDEX` | `WinbackIndexTask` | `dws_member_winback_index` | WBI(回流指数) | delete-before-insert(按门店全量刷新) |
|
||||
| `DWS_NEWCONV_INDEX` | `NewconvIndexTask` | `dws_member_newconv_index` | NCI(新客转化指数) | delete-before-insert(按门店全量刷新) |
|
||||
| `DWS_RELATION_INDEX` | `RelationIndexTask` | `dws_member_assistant_relation_index` | RS/OS/MS/ML(关系指数) | delete-before-insert(按门店全量刷新) |
|
||||
| `DWS_SPENDING_POWER_INDEX` | `SpendingPowerIndexTask` | `dws_member_spending_power_index` | SPI(消费力指数) | delete-before-insert(按门店全量刷新) |
|
||||
| `DWS_ML_MANUAL_IMPORT` | `MlManualImportTask` | `dws_ml_manual_order_source` / `dws_ml_manual_order_alloc` | ML(手动台账导入) | 按 scope 先删后写 |
|
||||
|
||||
> 注册位置:`orchestration/task_registry.py`,所有 INDEX 任务的 `requires_db_config=False`、`layer="INDEX"`。
|
||||
> 注册位置:`orchestration/task_registry.py`,所有 INDEX 任务的 `requires_db_config=False`、`layer="INDEX"`。SPI 任务额外声明 `depends_on=["DWS_MEMBER_CONSUMPTION"]`。
|
||||
|
||||
---
|
||||
|
||||
@@ -34,8 +35,9 @@ BaseTask
|
||||
├── MemberIndexBaseTask ← WBI / NCI 共享的会员特征提取
|
||||
│ ├── WinbackIndexTask
|
||||
│ └── NewconvIndexTask
|
||||
├── RelationIndexTask ← RS/OS/MS/ML 四合一
|
||||
└── MlManualImportTask ← ML 人工台账导入
|
||||
├── RelationIndexTask ← RS/OS/MS/ML 四合一
|
||||
├── SpendingPowerIndexTask ← SPI 消费力指数(独立数据提取)
|
||||
└── MlManualImportTask ← ML 人工台账导入
|
||||
```
|
||||
|
||||
### 子类必须实现的抽象方法
|
||||
@@ -414,6 +416,177 @@ NCI 产出 3 个 Display Score:
|
||||
| `w_value` | 1.0 | 价值权重 |
|
||||
|
||||
|
||||
---
|
||||
|
||||
## DWS_SPENDING_POWER_INDEX — 消费力指数(SPI)
|
||||
|
||||
| 属性 | 值 |
|
||||
|------|-----|
|
||||
| 任务代码 | `DWS_SPENDING_POWER_INDEX` |
|
||||
| Python 类 | `SpendingPowerIndexTask`(`tasks/dws/index/spending_power_index_task.py`) |
|
||||
| 继承链 | `BaseTask → BaseDwsTask → BaseIndexTask → SpendingPowerIndexTask` |
|
||||
| 目标表 | `dws.dws_member_spending_power_index` |
|
||||
| 主键 | `site_id, member_id` |
|
||||
| 指数类型 | `SPI` |
|
||||
| 依赖任务 | `DWS_MEMBER_CONSUMPTION` |
|
||||
| 更新策略 | 按门店全量刷新(先 DELETE WHERE site_id = %s,再 INSERT) |
|
||||
|
||||
### 业务含义
|
||||
|
||||
SPI 衡量会员在门店内的综合消费力层级——分数越高,表示该会员的消费能力和消费意愿越强。适用于客户分层、资源分配和精准营销场景,与 WBI/NCI 等运营指数协同使用。
|
||||
|
||||
SPI 不使用 `MemberIndexBaseTask` 的会员分群逻辑(NEW/OLD/STOP),所有在近 90 天内有消费或充值记录的会员均参与计算。
|
||||
|
||||
### 计算范围
|
||||
|
||||
所有在近 90 天内有消费订单(settle_type IN (1, 3))或充值订单(settle_type = 5)的会员。无任何消费/充值数据的门店跳过计算,返回 `{'status': 'skipped', 'reason': 'no_data'}`。
|
||||
|
||||
### 数据来源
|
||||
|
||||
| 数据 | 来源表 | 提取方式 |
|
||||
|------|--------|----------|
|
||||
| 消费订单 | `dwd.dwd_settlement_head` | settle_type IN (1, 3),近 90 天,聚合为会员级特征 |
|
||||
| 充值订单 | `dwd.dwd_recharge_order` | settle_type = 5,近 90 天,聚合为会员级充值特征 |
|
||||
| 算法参数 | `dws.cfg_index_parameters` | index_type = 'SPI' |
|
||||
|
||||
### 基础特征(SPIMemberFeatures)
|
||||
|
||||
从 DWD 层提取并计算的会员级消费特征:
|
||||
|
||||
| 字段 | 类型 | 含义 |
|
||||
|------|------|------|
|
||||
| `spend_30` | float | 近 30 天消费总额 |
|
||||
| `spend_90` | float | 近 90 天消费总额 |
|
||||
| `recharge_90` | float | 近 90 天充值总额 |
|
||||
| `orders_30` | int | 近 30 天消费笔数 |
|
||||
| `orders_90` | int | 近 90 天消费笔数 |
|
||||
| `visit_days_30` | int | 近 30 天消费日数(按天去重) |
|
||||
| `visit_days_90` | int | 近 90 天消费日数(按天去重) |
|
||||
| `avg_ticket_90` | float | 90 天客单价:`spend_90 / max(orders_90, 1)` |
|
||||
| `active_weeks_90` | int | 近 90 天有消费的自然周数(最多 13 周) |
|
||||
| `daily_spend_ewma_90` | float | 近 90 天日消费 EWMA |
|
||||
|
||||
### 算法概要
|
||||
|
||||
SPI 由三个子分加权合成:
|
||||
|
||||
```
|
||||
SPI_raw = w_L × Level + w_S × Speed + w_P × Stability
|
||||
```
|
||||
|
||||
默认权重:`w_L = 0.60`、`w_S = 0.30`、`w_P = 0.10`。
|
||||
|
||||
#### 子分 1:消费水平(Level)
|
||||
|
||||
基于消费金额和客单价的 log1p 压缩加权,衡量客户消费金额层级:
|
||||
|
||||
```
|
||||
L = w_s30 × ln(1 + spend_30 / M30)
|
||||
+ w_s90 × ln(1 + spend_90 / M90)
|
||||
+ w_ticket × ln(1 + avg_ticket_90 / T0)
|
||||
+ w_r90 × ln(1 + recharge_90 / R90)
|
||||
```
|
||||
|
||||
| 参数 | 默认值 | 含义 |
|
||||
|------|--------|------|
|
||||
| `w_level_spend_30` (w_s30) | 0.30 | 近 30 天消费权重 |
|
||||
| `w_level_spend_90` (w_s90) | 0.35 | 近 90 天消费权重 |
|
||||
| `w_level_ticket_90` (w_ticket) | 0.20 | 客单价权重 |
|
||||
| `w_level_recharge_90` (w_r90) | 0.15 | 充值权重 |
|
||||
| `amount_base_spend_30` (M30) | 500 | 30 天消费压缩基数 |
|
||||
| `amount_base_spend_90` (M90) | 1500 | 90 天消费压缩基数 |
|
||||
| `amount_base_ticket_90` (T0) | 200 | 客单价压缩基数 |
|
||||
| `amount_base_recharge_90` (R90) | 1000 | 充值压缩基数 |
|
||||
|
||||
当所有消费和充值金额均为 0 时,Level 子分为 0.0。
|
||||
|
||||
#### 子分 2:消费速度(Speed)
|
||||
|
||||
衡量近期消费推进速度与节奏变化,由三个速度指标加权合成:
|
||||
|
||||
```
|
||||
S = w_abs × V_abs + w_rel × max(0, V_rel) + w_ewma × V_ewma
|
||||
```
|
||||
|
||||
| 速度指标 | 公式 | 含义 |
|
||||
|----------|------|------|
|
||||
| V_abs(绝对速度) | `ln(1 + spend_30 / (max(visit_days_30, 1) × V0))` | 每消费日平均消费的对数压缩 |
|
||||
| V_rel(相对速度) | `ln((v_30 + ε) / (v_90 + ε))`,其中 `v_30 = spend_30/30`,`v_90 = spend_90/90` | 近期消费速率相对长期的变化 |
|
||||
| V_ewma(EWMA 速度) | `ln(1 + daily_spend_ewma_90 / E0)` | 日消费 EWMA 的对数压缩 |
|
||||
|
||||
设计要点:仅对加速(`V_rel > 0`)加分,不对减速直接扣分(通过 `max(0, V_rel)` 实现)。
|
||||
|
||||
| 参数 | 默认值 | 含义 |
|
||||
|------|--------|------|
|
||||
| `w_speed_abs` | 0.50 | 绝对速度权重 |
|
||||
| `w_speed_rel` | 0.30 | 相对速度权重 |
|
||||
| `w_speed_ewma` | 0.20 | EWMA 速度权重 |
|
||||
| `amount_base_speed_abs` (V0) | 100 | 绝对速度压缩基数 |
|
||||
| `amount_base_ewma_90` (E0) | 50 | EWMA 速度压缩基数 |
|
||||
| `speed_epsilon` (ε) | 1e-6 | 防除零小量 |
|
||||
|
||||
#### 子分 3:消费稳定性(Stability)
|
||||
|
||||
基于近 90 天周覆盖率,识别稳定高消费与偶发冲高:
|
||||
|
||||
```
|
||||
P = active_weeks_90 / 13
|
||||
```
|
||||
|
||||
近 90 天共约 13 个自然周,`active_weeks_90` 为其中有消费的周数。取值范围 [0, 1]。
|
||||
|
||||
当 `use_stability = 0` 时,Stability 子分权重视为 0,跳过稳定性计算。
|
||||
|
||||
### Display Score 归一化
|
||||
|
||||
SPI 产出 4 组 Display Score,各自独立归一化到 0-10 分:
|
||||
|
||||
| 展示分 | 对应 Raw Score | 分位历史 index_type |
|
||||
|--------|---------------|---------------------|
|
||||
| `display_score` | `raw_score`(SPI 总分) | `SPI` |
|
||||
| `score_level_display` | `score_level_raw` | `SPI_LEVEL` |
|
||||
| `score_speed_display` | `score_speed_raw` | `SPI_SPEED` |
|
||||
| `score_stability_display` | `score_stability_raw` | `SPI_STABILITY` |
|
||||
|
||||
归一化流程复用 `BaseIndexTask.batch_normalize_to_display`:
|
||||
|
||||
```
|
||||
Raw Score → [可选压缩] → Winsorize(P5, P95) → MinMax(0, 10) → [可选 EWMA 平滑]
|
||||
```
|
||||
|
||||
### 金额压缩基数校准
|
||||
|
||||
SPI 支持金额压缩基数的自动校准机制:
|
||||
|
||||
1. 首次执行或参数缺失时,从门店近 90 天消费数据计算各基数的中位数作为建议值
|
||||
2. 若 `cfg_index_parameters` 中已存在对应参数,优先使用配置表中的值
|
||||
3. 实际使用的基数值会输出到日志,便于运营人员审查和手动调优
|
||||
|
||||
### 执行流程
|
||||
|
||||
```
|
||||
1. 获取 site_id
|
||||
2. load_index_parameters('SPI') 加载参数(缺失参数使用 DEFAULT_PARAMS)
|
||||
3. _extract_spending_features:从 dwd_settlement_head 提取消费特征
|
||||
4. _extract_recharge_features:从 dwd_recharge_order 提取充值特征
|
||||
5. _compute_daily_spend_ewma:计算日消费 EWMA
|
||||
6. _calibrate_amount_bases:校准金额压缩基数
|
||||
7. 逐会员计算:compute_level → compute_speed → compute_stability → compute_spi_raw
|
||||
8. batch_normalize_to_display:SPI 总分 + 三个子分各自独立归一化
|
||||
9. DELETE FROM dws_member_spending_power_index WHERE site_id = %s
|
||||
10. _save_spi_data:批量 INSERT
|
||||
11. 保存分位点历史到 dws_index_percentile_history(index_type='SPI')
|
||||
```
|
||||
|
||||
### 默认权重
|
||||
|
||||
| 参数 | 默认值 | 含义 |
|
||||
|------|--------|------|
|
||||
| `weight_level` | 0.60 | Level 子分在总分中的权重 |
|
||||
| `weight_speed` | 0.30 | Speed 子分在总分中的权重 |
|
||||
| `weight_stability` | 0.10 | Stability 子分在总分中的权重 |
|
||||
|
||||
|
||||
---
|
||||
|
||||
## DWS_RELATION_INDEX — 关系指数(RS/OS/MS/ML)
|
||||
@@ -751,5 +924,29 @@ ORDER BY effective_from DESC
|
||||
| `compression_mode` | 1 | 压缩模式(默认 log1p) |
|
||||
| `use_smoothing` / `ewma_alpha` | 1 / 0.2 | EWMA 平滑 |
|
||||
|
||||
### SPI 参数清单
|
||||
|
||||
| 参数名 | 默认值 | 说明 |
|
||||
|--------|--------|------|
|
||||
| `spend_window_short_days` | 30 | 短期消费窗口(天) |
|
||||
| `spend_window_long_days` | 90 | 长期消费窗口(天) |
|
||||
| `ewma_alpha_daily_spend` | 0.3 | 日消费 EWMA 平滑系数 |
|
||||
| `amount_base_spend_30` | 500 | 30 天消费压缩基数 |
|
||||
| `amount_base_spend_90` | 1500 | 90 天消费压缩基数 |
|
||||
| `amount_base_ticket_90` | 200 | 客单价压缩基数 |
|
||||
| `amount_base_recharge_90` | 1000 | 充值压缩基数 |
|
||||
| `amount_base_speed_abs` | 100 | 绝对速度压缩基数 |
|
||||
| `amount_base_ewma_90` | 50 | EWMA 速度压缩基数 |
|
||||
| `w_level_spend_30` / `w_level_spend_90` | 0.30 / 0.35 | Level 子分中消费权重 |
|
||||
| `w_level_ticket_90` / `w_level_recharge_90` | 0.20 / 0.15 | Level 子分中客单/充值权重 |
|
||||
| `w_speed_abs` / `w_speed_rel` / `w_speed_ewma` | 0.50 / 0.30 / 0.20 | Speed 子分三项权重 |
|
||||
| `weight_level` / `weight_speed` / `weight_stability` | 0.60 / 0.30 / 0.10 | SPI 总分三子分权重 |
|
||||
| `stability_window_days` | 90 | 稳定性计算窗口(天) |
|
||||
| `use_stability` | 1 | 是否启用稳定性子分(0=跳过) |
|
||||
| `percentile_lower` / `percentile_upper` | 5 / 95 | 归一化分位点 |
|
||||
| `compression_mode` | 1 | 压缩模式(默认 log1p) |
|
||||
| `use_smoothing` / `ewma_alpha` | 1 / 0.2 | EWMA 分位平滑 |
|
||||
| `speed_epsilon` | 1e-6 | 速度计算防除零小量 |
|
||||
|
||||
> 种子数据脚本:`db/etl_feiqiu/seeds/seed_index_parameters.sql`
|
||||
> DDL 定义:`docs/database/ddl/etl_feiqiu__dws.sql`
|
||||
|
||||
@@ -228,7 +228,7 @@ execute(cursor_data)
|
||||
|
||||
### content_hash 去重机制
|
||||
|
||||
`content_hash` 是通用 ODS 任务的核心去重手段,所有 22 个任务默认开启(`skip_unchanged=True`)。
|
||||
`content_hash` 是通用 ODS 任务的核心去重手段,所有 23 个任务默认开启(`skip_unchanged=True`)。
|
||||
|
||||
#### 计算方式
|
||||
|
||||
|
||||
@@ -118,12 +118,6 @@ psql "$PG_DSN" -f db/etl_feiqiu/seeds/seed_*.sql
|
||||
> 注:旧的 `db/etl_feiqiu/schemas/` 和 `db/etl_feiqiu/migrations/` 已归档至 `db/_archived/`。
|
||||
> DDL 基线现由 `docs/database/ddl/` 统一管理,可通过 `python scripts/ops/gen_consolidated_ddl.py` 重新生成。
|
||||
|
||||
或使用 CLI 工具任务初始化:
|
||||
|
||||
```bash
|
||||
python -m cli.main --tasks INIT_ODS_SCHEMA,INIT_DWD_SCHEMA,INIT_DWS_SCHEMA,SEED_DWS_CONFIG --pg-dsn "$PG_DSN"
|
||||
```
|
||||
|
||||
## 5. 验证安装
|
||||
|
||||
```bash
|
||||
|
||||
@@ -123,7 +123,7 @@
|
||||
|
||||
**解决方案**:
|
||||
```bash
|
||||
pip install -r requirements.txt
|
||||
uv sync
|
||||
```
|
||||
|
||||
### 5.3 编码问题
|
||||
|
||||
@@ -166,7 +166,11 @@ class FlowRunner:
|
||||
|
||||
timer.start_step("INCREMENT_ETL")
|
||||
if task_codes:
|
||||
results = self.task_executor.run_tasks(task_codes, data_source=data_source)
|
||||
# CHANGE [2026-02-24] intent: 对前端传入的 task_codes 也执行拓扑排序,
|
||||
# 避免 DWS 在 DWD 未完成时就开始计算(跨层依赖顺序缺失 bug)
|
||||
# prompt: "修复管理后台全选任务时不按层级顺序执行的问题"
|
||||
sorted_codes = topological_sort(task_codes, self.task_registry)
|
||||
results = self.task_executor.run_tasks(sorted_codes, data_source=data_source)
|
||||
else:
|
||||
auto_tasks = self._resolve_tasks(layers)
|
||||
results = self.task_executor.run_tasks(auto_tasks, data_source=data_source)
|
||||
|
||||
@@ -107,6 +107,11 @@ class TaskExecutor:
|
||||
results.append(result_entry)
|
||||
except Exception as exc: # noqa: BLE001
|
||||
self.logger.error("任务 %s 失败: %s", task_code, exc, exc_info=True)
|
||||
# CHANGE 2026-02-24 | 任务失败后 rollback,防止 InFailedSqlTransaction 级联
|
||||
try:
|
||||
self.db.rollback()
|
||||
except Exception:
|
||||
pass
|
||||
results.append({
|
||||
"task_code": task_code,
|
||||
"status": "失败",
|
||||
|
||||
@@ -30,6 +30,7 @@ from tasks.utility.seed_dws_config_task import SeedDwsConfigTask
|
||||
# DWS 层任务导入
|
||||
from tasks.dws import (
|
||||
AssistantDailyTask,
|
||||
AssistantOrderContributionTask,
|
||||
AssistantMonthlyTask,
|
||||
AssistantCustomerTask,
|
||||
AssistantSalaryTask,
|
||||
@@ -147,6 +148,7 @@ default_registry.register("DATA_INTEGRITY_CHECK", DataIntegrityTask, requires_db
|
||||
# ── DWS 层业务任务 ────────────────────────────────────────────
|
||||
default_registry.register("DWS_BUILD_ORDER_SUMMARY", DwsBuildOrderSummaryTask, requires_db_config=False, layer="DWS")
|
||||
default_registry.register("DWS_ASSISTANT_DAILY", AssistantDailyTask, layer="DWS")
|
||||
default_registry.register("DWS_ASSISTANT_ORDER_CONTRIBUTION", AssistantOrderContributionTask, layer="DWS", depends_on=["DWD_LOAD_FROM_ODS"])
|
||||
# CHANGE [2026-07-17] intent: 为已知依赖关系添加 depends_on 声明(需求 8.1, 8.2)
|
||||
default_registry.register("DWS_ASSISTANT_MONTHLY", AssistantMonthlyTask, layer="DWS", depends_on=["DWS_ASSISTANT_DAILY"])
|
||||
default_registry.register("DWS_ASSISTANT_CUSTOMER", AssistantCustomerTask, layer="DWS")
|
||||
@@ -166,7 +168,8 @@ default_registry.register("DWS_GOODS_STOCK_MONTHLY", GoodsStockMonthlyTask, laye
|
||||
# 替换为统一维护任务 DWS_MAINTENANCE(需求 4.5)
|
||||
# depends_on: 所有其他 DWS 任务——MV 刷新和清理应在数据写入后执行
|
||||
default_registry.register("DWS_MAINTENANCE", DwsMaintenanceTask, layer="DWS", depends_on=[
|
||||
"DWS_ASSISTANT_DAILY", "DWS_ASSISTANT_MONTHLY", "DWS_ASSISTANT_CUSTOMER",
|
||||
"DWS_ASSISTANT_DAILY", "DWS_ASSISTANT_ORDER_CONTRIBUTION",
|
||||
"DWS_ASSISTANT_MONTHLY", "DWS_ASSISTANT_CUSTOMER",
|
||||
"DWS_ASSISTANT_SALARY", "DWS_ASSISTANT_FINANCE",
|
||||
"DWS_MEMBER_CONSUMPTION", "DWS_MEMBER_VISIT",
|
||||
"DWS_FINANCE_DAILY", "DWS_FINANCE_RECHARGE",
|
||||
|
||||
@@ -2,6 +2,8 @@
|
||||
"""拓扑排序模块 — Kahn's algorithm
|
||||
|
||||
对任务列表按依赖关系执行拓扑排序:
|
||||
- 显式依赖:TaskMeta.depends_on 声明的任务间依赖
|
||||
- 隐含层级依赖:ODS → DWD → DWS → INDEX,同批任务中低层任务必须先于高层任务
|
||||
- 仅对当前执行列表内的任务排序
|
||||
- depends_on 中引用的任务不在列表内时记录警告
|
||||
- 检测循环依赖并抛出 ValueError
|
||||
@@ -11,10 +13,22 @@ import logging
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
# 层级优先级:数值越小越先执行
|
||||
_LAYER_ORDER: dict[str, int] = {
|
||||
"ODS": 0,
|
||||
"DWD": 1,
|
||||
"DWS": 2,
|
||||
"INDEX": 3,
|
||||
}
|
||||
|
||||
|
||||
def topological_sort(task_codes: list[str], registry) -> list[str]:
|
||||
"""对任务列表执行拓扑排序(Kahn's algorithm)。
|
||||
|
||||
除了显式 depends_on 依赖外,还注入隐含的层级依赖:
|
||||
同批任务中,所有 ODS 任务排在 DWD 之前,DWD 排在 DWS 之前,
|
||||
DWS 排在 INDEX 之前。这确保跨层执行顺序正确。
|
||||
|
||||
Args:
|
||||
task_codes: 待排序的任务代码列表
|
||||
registry: TaskRegistry 实例,提供 get_metadata() 查询依赖
|
||||
@@ -29,9 +43,10 @@ def topological_sort(task_codes: list[str], registry) -> list[str]:
|
||||
return []
|
||||
|
||||
in_degree = {code: 0 for code in task_codes}
|
||||
graph = {code: [] for code in task_codes}
|
||||
graph: dict[str, list[str]] = {code: [] for code in task_codes}
|
||||
task_set = set(task_codes)
|
||||
|
||||
# 1. 显式依赖(depends_on)
|
||||
for code in task_codes:
|
||||
meta = registry.get_metadata(code)
|
||||
if meta and meta.depends_on:
|
||||
@@ -44,6 +59,31 @@ def topological_sort(task_codes: list[str], registry) -> list[str]:
|
||||
"任务 %s 依赖 %s,但后者不在当前执行列表中", code, dep
|
||||
)
|
||||
|
||||
# CHANGE [2026-02-24] intent: 注入隐含层级依赖,确保跨层执行顺序正确
|
||||
# assumptions: 层级顺序固定为 ODS→DWD→DWS→INDEX;同层任务无隐含互相依赖
|
||||
# prompt: "修复管理后台全选任务时不按层级顺序执行的问题"
|
||||
# 2. 隐含层级依赖:按层分组,相邻层之间建立边
|
||||
# 选择每层一个"代表节点"作为屏障,避免 O(n*m) 的全连接边
|
||||
layer_groups: dict[int, list[str]] = {}
|
||||
for code in task_codes:
|
||||
meta = registry.get_metadata(code)
|
||||
if meta and meta.layer:
|
||||
order = _LAYER_ORDER.get(meta.layer.upper())
|
||||
if order is not None:
|
||||
layer_groups.setdefault(order, []).append(code)
|
||||
|
||||
sorted_layers = sorted(layer_groups.keys())
|
||||
for i in range(len(sorted_layers) - 1):
|
||||
lower_layer = sorted_layers[i]
|
||||
higher_layer = sorted_layers[i + 1]
|
||||
# 高层的每个任务都依赖低层的所有任务
|
||||
for higher_code in layer_groups[higher_layer]:
|
||||
for lower_code in layer_groups[lower_layer]:
|
||||
# 避免重复添加已有的显式依赖边
|
||||
if higher_code not in graph[lower_code]:
|
||||
graph[lower_code].append(higher_code)
|
||||
in_degree[higher_code] += 1
|
||||
|
||||
queue = deque(code for code in task_codes if in_degree[code] == 0)
|
||||
result = []
|
||||
while queue:
|
||||
|
||||
@@ -606,6 +606,11 @@ def run_consistency_check(
|
||||
report.ods_vs_dwd_results.append(result)
|
||||
|
||||
except Exception as exc:
|
||||
# CHANGE 2026-02-24 | rollback 防止 InFailedSqlTransaction 级联到后续表检查
|
||||
try:
|
||||
db_conn.conn.rollback()
|
||||
except Exception:
|
||||
pass
|
||||
result = TableCheckResult(
|
||||
table_name=dwd_full,
|
||||
check_type="ods_vs_dwd",
|
||||
|
||||
393
apps/etl/connectors/feiqiu/scripts/verify_dws_extensions.py
Normal file
393
apps/etl/connectors/feiqiu/scripts/verify_dws_extensions.py
Normal file
@@ -0,0 +1,393 @@
|
||||
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""DWS-layer extension verification script — shadow-run validation.

Validates, against Requirements 9.1–9.4, the structural integrity and data
sanity of three DWS tables:
1. dws_assistant_order_contribution — consistency of the four revenue stats
2. dws_member_consumption_summary  — recharge-window columns
3. dws_assistant_daily_detail      — tier-discount penalty columns
4. Existence of the RLS views and FDW foreign tables

Usage:
    cd apps/etl/connectors/feiqiu
    python scripts/verify_dws_extensions.py
"""

from __future__ import annotations

import os
import sys
from pathlib import Path

from dotenv import load_dotenv

# ---------------------------------------------------------------------------
# 1. Load the repo-root .env (per the testing-env.md convention)
# ---------------------------------------------------------------------------
# parents[5]: scripts/ -> feiqiu/ -> connectors/ -> etl/ -> apps/ -> repo root
_ROOT = Path(__file__).resolve().parents[5]
load_dotenv(_ROOT / ".env")

# DSN for the ETL database; required — fail fast when absent.
PG_DSN = os.environ.get("PG_DSN")
if not PG_DSN:
    raise RuntimeError("PG_DSN 未设置,请检查 .env 配置")

# DSN for the business (app) database; also required.
APP_DB_DSN = os.environ.get("APP_DB_DSN")
if not APP_DB_DSN:
    raise RuntimeError("APP_DB_DSN 未设置,请检查 .env 配置")

# ---------------------------------------------------------------------------
# 2. Database driver
# ---------------------------------------------------------------------------
# psycopg2 is an optional dependency of this script; exit with a hint
# instead of a raw ImportError traceback when it is missing.
try:
    import psycopg2
except ImportError:
    print("ERROR: psycopg2 未安装,请执行 uv pip install psycopg2-binary")
    sys.exit(1)
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# 辅助函数
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
class _Result:
|
||||
"""单条验证结果。"""
|
||||
|
||||
def __init__(self, name: str):
|
||||
self.name = name
|
||||
self.passed = True
|
||||
self.details: list[str] = []
|
||||
|
||||
def fail(self, msg: str) -> None:
|
||||
self.passed = False
|
||||
self.details.append(f" FAIL: {msg}")
|
||||
|
||||
def ok(self, msg: str) -> None:
|
||||
self.details.append(f" OK: {msg}")
|
||||
|
||||
def __str__(self) -> str:
|
||||
status = "PASS" if self.passed else "FAIL"
|
||||
header = f"[{status}] {self.name}"
|
||||
if self.details:
|
||||
return header + "\n" + "\n".join(self.details)
|
||||
return header
|
||||
|
||||
|
||||
def _query(conn, sql: str, params=None) -> list[tuple]:
|
||||
with conn.cursor() as cur:
|
||||
cur.execute(sql, params)
|
||||
return cur.fetchall()
|
||||
|
||||
|
||||
def _query_one(conn, sql: str, params=None):
    """Run *sql* and return the first row, or ``None`` when nothing matches."""
    rows = _query(conn, sql, params)
    if not rows:
        return None
    return rows[0]
|
||||
|
||||
|
||||
def _column_exists(conn, schema: str, table: str, column: str) -> bool:
    """Return True when *schema.table* has a column named *column*.

    Looks the column up in information_schema, so it works for tables
    and views alike.
    """
    probe_sql = """
        SELECT 1 FROM information_schema.columns
        WHERE table_schema = %s AND table_name = %s AND column_name = %s
        """
    hit = _query_one(conn, probe_sql, (schema, table, column))
    return hit is not None
|
||||
|
||||
|
||||
def _table_exists(conn, schema: str, table: str) -> bool:
    """Return True when *schema.table* exists (tables and views both count).

    information_schema.tables lists views as well, which is what the RLS
    view checks below rely on.
    """
    probe_sql = """
        SELECT 1 FROM information_schema.tables
        WHERE table_schema = %s AND table_name = %s
        """
    hit = _query_one(conn, probe_sql, (schema, table))
    return hit is not None
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# 验证 1:dws_assistant_order_contribution 四项统计(Req 9.1, 9.2)
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
def verify_contribution_table(conn) -> _Result:
    """Check 1 (Req 9.1, 9.2): structure and data sanity of
    dws.dws_assistant_order_contribution.

    Verifies: table existence, the 14 required columns, the expected index,
    reports the row count, and (when rows exist) asserts the four revenue
    statistics are non-negative.
    """
    r = _Result("验证 1:dws_assistant_order_contribution 表结构与数据")

    # 1a. Table must exist; bail out early — later checks are meaningless.
    if not _table_exists(conn, "dws", "dws_assistant_order_contribution"):
        r.fail("表 dws.dws_assistant_order_contribution 不存在")
        return r
    r.ok("表存在")

    # 1b. All key columns must be present.
    required_cols = [
        "contribution_id", "site_id", "tenant_id", "assistant_id",
        "assistant_nickname", "stat_date",
        "order_gross_revenue", "order_net_revenue",
        "time_weighted_revenue", "time_weighted_net_revenue",
        "order_count", "total_service_seconds",
        "created_at", "updated_at",
    ]
    missing = [c for c in required_cols
               if not _column_exists(conn, "dws", "dws_assistant_order_contribution", c)]
    if missing:
        r.fail(f"缺少字段: {', '.join(missing)}")
    else:
        r.ok(f"全部 {len(required_cols)} 个字段存在")

    # 1c. Expected index must exist.
    # NOTE(review): this only checks pg_indexes by name; it does NOT verify
    # that the index is actually UNIQUE — confirm via pg_index.indisunique.
    idx_row = _query_one(
        conn,
        """
        SELECT indexname FROM pg_indexes
        WHERE schemaname = 'dws'
          AND tablename = 'dws_assistant_order_contribution'
          AND indexname = 'idx_aoc_site_assistant_date'
        """,
    )
    if idx_row:
        r.ok("唯一索引 idx_aoc_site_assistant_date 存在")
    else:
        r.fail("唯一索引 idx_aoc_site_assistant_date 不存在")

    # 1d. Row count — informational only, never a FAIL.
    row = _query_one(conn, "SELECT COUNT(*) FROM dws.dws_assistant_order_contribution")
    count = row[0] if row else 0
    r.ok(f"当前数据行数: {count}")

    # 1e. When data is present, the four revenue stats must be non-negative.
    if count > 0:
        neg_row = _query_one(
            conn,
            """
            SELECT COUNT(*) FROM dws.dws_assistant_order_contribution
            WHERE order_gross_revenue < 0
               OR order_net_revenue < 0
               OR time_weighted_revenue < 0
               OR time_weighted_net_revenue < 0
            """,
        )
        neg_count = neg_row[0] if neg_row else 0
        if neg_count > 0:
            r.fail(f"存在 {neg_count} 条四项统计为负值的记录")
        else:
            r.ok("四项统计数值均非负")

    return r
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# 验证 2:dws_member_consumption_summary 充值窗口字段(Req 9.3)
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
def verify_consumption_fields(conn) -> _Result:
    """Check 2 (Req 9.3): recharge-window columns on
    dws.dws_member_consumption_summary.

    Verifies: table existence, the seven newly-added columns, reports the
    row count, and (when rows exist) asserts recharge amounts and the
    average-ticket amount are non-negative.
    """
    r = _Result("验证 2:dws_member_consumption_summary 充值窗口字段")

    # Table must exist; later checks depend on it.
    if not _table_exists(conn, "dws", "dws_member_consumption_summary"):
        r.fail("表 dws.dws_member_consumption_summary 不存在")
        return r
    r.ok("表存在")

    # The seven columns added by the P2 migration.
    new_cols = [
        "recharge_count_30d", "recharge_count_60d", "recharge_count_90d",
        "recharge_amount_30d", "recharge_amount_60d", "recharge_amount_90d",
        "avg_ticket_amount",
    ]
    missing = [c for c in new_cols
               if not _column_exists(conn, "dws", "dws_member_consumption_summary", c)]
    if missing:
        r.fail(f"缺少新增字段: {', '.join(missing)}")
    else:
        r.ok(f"全部 {len(new_cols)} 个新增字段存在")

    # Row count is informational; the negativity check only runs on data.
    row = _query_one(conn, "SELECT COUNT(*) FROM dws.dws_member_consumption_summary")
    count = row[0] if row else 0
    r.ok(f"当前数据行数: {count}")

    if count > 0:
        # Recharge amounts and average ticket must never be negative.
        neg_row = _query_one(
            conn,
            """
            SELECT COUNT(*) FROM dws.dws_member_consumption_summary
            WHERE recharge_amount_30d < 0
               OR recharge_amount_60d < 0
               OR recharge_amount_90d < 0
               OR avg_ticket_amount < 0
            """,
        )
        neg_count = neg_row[0] if neg_row else 0
        if neg_count > 0:
            r.fail(f"存在 {neg_count} 条充值金额或次均消费为负值的记录")
        else:
            r.ok("充值金额和次均消费均非负")

    return r
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# 验证 3:dws_assistant_daily_detail 惩罚字段(Req 9.4)
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
def verify_penalty_fields(conn) -> _Result:
    """Check 3 (Req 9.4): penalty columns on dws.dws_assistant_daily_detail.

    Verifies: table existence, the four penalty columns, that ``is_exempt``
    is a boolean, reports the row count, and (when rows exist) asserts
    ``penalty_minutes`` is non-negative.
    """
    r = _Result("验证 3:dws_assistant_daily_detail 惩罚字段")

    # Table must exist before any column checks.
    if not _table_exists(conn, "dws", "dws_assistant_daily_detail"):
        r.fail("表 dws.dws_assistant_daily_detail 不存在")
        return r
    r.ok("表存在")

    # The four columns added for the tier-discount penalty feature.
    new_cols = ["penalty_minutes", "penalty_reason", "is_exempt", "per_hour_contribution"]
    missing = [c for c in new_cols
               if not _column_exists(conn, "dws", "dws_assistant_daily_detail", c)]
    if missing:
        r.fail(f"缺少新增字段: {', '.join(missing)}")
    else:
        r.ok(f"全部 {len(new_cols)} 个惩罚字段存在")

    # is_exempt must be declared as boolean.
    # Note: when the column is missing, type_row is None and neither branch
    # fires — the missing-column failure above already covers that case.
    type_row = _query_one(
        conn,
        """
        SELECT data_type FROM information_schema.columns
        WHERE table_schema = 'dws'
          AND table_name = 'dws_assistant_daily_detail'
          AND column_name = 'is_exempt'
        """,
    )
    if type_row and type_row[0] == "boolean":
        r.ok("is_exempt 字段类型为 boolean")
    elif type_row:
        r.fail(f"is_exempt 字段类型为 {type_row[0]},预期 boolean")

    # Row count is informational; the negativity check only runs on data.
    row = _query_one(conn, "SELECT COUNT(*) FROM dws.dws_assistant_daily_detail")
    count = row[0] if row else 0
    r.ok(f"当前数据行数: {count}")

    if count > 0:
        # penalty_minutes >= 0 must hold for every row.
        neg_row = _query_one(
            conn,
            """
            SELECT COUNT(*) FROM dws.dws_assistant_daily_detail
            WHERE penalty_minutes < 0
            """,
        )
        neg_count = neg_row[0] if neg_row else 0
        if neg_count > 0:
            r.fail(f"存在 {neg_count} 条 penalty_minutes 为负值的记录")
        else:
            r.ok("penalty_minutes 均非负")

    return r
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# 验证 4:RLS 视图和 FDW 映射(Req 7, 8)
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
def verify_rls_views(conn_etl) -> _Result:
    """Check 4a (Req 7, 8): the three RLS views exist in the ETL database's
    ``app`` schema."""
    result = _Result("验证 4a:RLS 视图存在性(ETL 库 app schema)")

    expected_views = (
        "v_dws_assistant_order_contribution",
        "v_dws_member_consumption_summary",
        "v_dws_assistant_daily_detail",
    )
    for view_name in expected_views:
        # information_schema.tables lists views too, so _table_exists works.
        if _table_exists(conn_etl, "app", view_name):
            result.ok(f"视图 app.{view_name} 存在")
        else:
            result.fail(f"视图 app.{view_name} 不存在")

    return result
|
||||
|
||||
|
||||
def verify_fdw_tables(conn_app) -> _Result:
    """Check 4b (Req 7, 8): the FDW foreign tables exist in the business
    database's ``fdw_etl`` schema."""
    result = _Result("验证 4b:FDW 外部表存在性(业务库 fdw_etl schema)")

    # Foreign-table names mirror the RLS view names, v_ prefix included.
    probe_sql = """
            SELECT 1 FROM information_schema.tables
            WHERE table_schema = 'fdw_etl' AND table_name = %s
            """
    expected_tables = (
        "v_dws_assistant_order_contribution",
        "v_dws_member_consumption_summary",
        "v_dws_assistant_daily_detail",
    )
    for table_name in expected_tables:
        found = _query_one(conn_app, probe_sql, (table_name,))
        if found:
            result.ok(f"外部表 fdw_etl.{table_name} 存在")
        else:
            result.fail(f"外部表 fdw_etl.{table_name} 不存在")

    return result
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# 主函数
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
def main() -> int:
    """Run all five verification checks and print a summary.

    Returns 0 when every check passes, 1 on any failure or when either
    database connection cannot be established.
    """
    results: list[_Result] = []

    # Connect to the ETL test database.
    try:
        conn_etl = psycopg2.connect(PG_DSN)
        conn_etl.autocommit = True
    except Exception as e:
        # NOTE(review): printing PG_DSN[:40] may expose credentials embedded
        # in the DSN — consider redacting the user/password portion.
        print(f"ERROR: 无法连接 ETL 库 ({PG_DSN[:40]}...): {e}")
        return 1

    # Connect to the business test database.
    try:
        conn_app = psycopg2.connect(APP_DB_DSN)
        conn_app.autocommit = True
    except Exception as e:
        # NOTE(review): same credential-exposure concern as above.
        print(f"ERROR: 无法连接业务库 ({APP_DB_DSN[:40]}...): {e}")
        conn_etl.close()  # don't leak the first connection on partial failure
        return 1

    try:
        print("=" * 60)
        print("DWS 层扩展验证 — 影子跑数验证")
        print("=" * 60)
        print()

        # Checks against the ETL database.
        results.append(verify_contribution_table(conn_etl))
        results.append(verify_consumption_fields(conn_etl))
        results.append(verify_penalty_fields(conn_etl))
        results.append(verify_rls_views(conn_etl))

        # Check against the business database.
        results.append(verify_fdw_tables(conn_app))

        # Print each check's detail block.
        for r in results:
            print(r)
            print()

        # Summary line.
        total = len(results)
        passed = sum(1 for r in results if r.passed)
        failed = total - passed
        print("=" * 60)
        print(f"汇总: {passed}/{total} 通过, {failed} 失败")
        print("=" * 60)

        return 0 if failed == 0 else 1

    finally:
        # Always release both connections regardless of outcome.
        conn_etl.close()
        conn_app.close()
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
sys.exit(main())
|
||||
@@ -269,6 +269,9 @@ class DwdLoadTask(BaseTask):
|
||||
("days_on_shelf", "days_available", None),
|
||||
("sort_order", "sort", None),
|
||||
("time_slot_sale", "time_slot_sale", None), # CHANGE 2026-02-21: 新增分时段销售标记
|
||||
("warning_sales_day", "warning_sales_day", None), # CHANGE 2026-02-24: 库存预警日均销量
|
||||
("warning_day_max", "warning_day_max", None), # CHANGE 2026-02-24: 预警天数上限
|
||||
("warning_day_min", "warning_day_min", None), # CHANGE 2026-02-24: 预警天数下限
|
||||
],
|
||||
"dwd.dim_goods_category": [
|
||||
("category_id", "id", None),
|
||||
|
||||
@@ -13,6 +13,7 @@ DWS层ETL任务模块
|
||||
|
||||
from .base_dws_task import BaseDwsTask, TimeLayer, TimeWindow, CourseType, DiscountType
|
||||
from .assistant_daily_task import AssistantDailyTask
|
||||
from .assistant_order_contribution_task import AssistantOrderContributionTask
|
||||
from .assistant_monthly_task import AssistantMonthlyTask
|
||||
from .assistant_customer_task import AssistantCustomerTask
|
||||
from .assistant_salary_task import AssistantSalaryTask
|
||||
@@ -47,6 +48,7 @@ __all__ = [
|
||||
"DiscountType",
|
||||
# 助教维度
|
||||
"AssistantDailyTask",
|
||||
"AssistantOrderContributionTask",
|
||||
"AssistantMonthlyTask",
|
||||
"AssistantCustomerTask",
|
||||
"AssistantSalaryTask",
|
||||
|
||||
@@ -29,12 +29,19 @@
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from datetime import date, datetime, timedelta
|
||||
from decimal import Decimal
|
||||
from collections import defaultdict
|
||||
from datetime import date, datetime, time, timedelta
|
||||
from decimal import Decimal, ROUND_HALF_UP
|
||||
from typing import Any, Dict, List, Optional, Set, Tuple
|
||||
|
||||
from .base_dws_task import BaseDwsTask, CourseType, TaskContext
|
||||
|
||||
# 惩罚区域集合:大厅 A/B/C/S/TV + 麻将房 M1–M7
|
||||
PENALTY_AREAS: Set[str] = {
|
||||
"A", "B", "C", "S", "TV",
|
||||
"M1", "M2", "M3", "M4", "M5", "M6", "M7",
|
||||
}
|
||||
|
||||
|
||||
class AssistantDailyTask(BaseDwsTask):
|
||||
"""
|
||||
@@ -93,7 +100,7 @@ class AssistantDailyTask(BaseDwsTask):
|
||||
|
||||
def transform(self, extracted: Dict[str, Any], context: TaskContext) -> List[Dict[str, Any]]:
|
||||
"""
|
||||
转换数据:按助教+日期聚合
|
||||
转换数据:按助教+日期聚合,并执行定档折算惩罚检测
|
||||
"""
|
||||
service_records = extracted['service_records']
|
||||
site_id = extracted['site_id']
|
||||
@@ -109,6 +116,68 @@ class AssistantDailyTask(BaseDwsTask):
|
||||
site_id
|
||||
)
|
||||
|
||||
# ── 定档折算惩罚检测 ──
|
||||
# 构造重叠检测所需的记录格式
|
||||
overlap_records = []
|
||||
for r in service_records:
|
||||
start_t = r.get("start_use_time")
|
||||
end_t = r.get("last_use_time")
|
||||
if start_t is None or end_t is None:
|
||||
continue
|
||||
overlap_records.append({
|
||||
"assistant_id": r.get("assistant_id"),
|
||||
"table_id": r.get("table_id"),
|
||||
"table_area": r.get("table_area_name", ""),
|
||||
"start_time": start_t,
|
||||
"end_time": end_t,
|
||||
"service_date": r.get("service_date"),
|
||||
})
|
||||
|
||||
violations = self.detect_overlap_violations(overlap_records, PENALTY_AREAS)
|
||||
|
||||
# 将惩罚信息填充到聚合结果
|
||||
for agg in aggregated:
|
||||
aid = agg["assistant_id"]
|
||||
stat_date = agg["stat_date"]
|
||||
key = (aid, stat_date)
|
||||
|
||||
if agg.get("is_exempt"):
|
||||
# 豁免:不计算惩罚
|
||||
agg["penalty_minutes"] = Decimal("0")
|
||||
agg["penalty_reason"] = None
|
||||
agg["is_exempt"] = True
|
||||
agg["per_hour_contribution"] = None
|
||||
elif key in violations:
|
||||
# 有违规:计算惩罚
|
||||
# 取第一条违规信息(同一天可能有多条,取最严重的)
|
||||
v_list = violations[key]
|
||||
overlap_count = max(v["overlap_count"] for v in v_list)
|
||||
# per_hour_contribution 需要从台费数据计算
|
||||
# 此处使用聚合后的 base_ledger_amount 和 base_hours 近似
|
||||
base_hours = agg.get("base_hours", Decimal("0"))
|
||||
base_amount = agg.get("base_ledger_amount", Decimal("0"))
|
||||
if base_hours > 0:
|
||||
per_hour = base_amount / base_hours / Decimal(str(overlap_count))
|
||||
else:
|
||||
per_hour = Decimal("0")
|
||||
|
||||
actual_minutes = agg.get("base_hours", Decimal("0")) * Decimal("60")
|
||||
penalty = self.compute_penalty_minutes(actual_minutes, per_hour)
|
||||
|
||||
agg["penalty_minutes"] = penalty
|
||||
agg["penalty_reason"] = (
|
||||
f"规则2违规:同台桌{overlap_count}名助教重叠挂台,"
|
||||
f"单人每小时贡献={per_hour:.2f}元"
|
||||
)
|
||||
agg["is_exempt"] = False
|
||||
agg["per_hour_contribution"] = per_hour
|
||||
else:
|
||||
# 无违规
|
||||
agg["penalty_minutes"] = Decimal("0")
|
||||
agg["penalty_reason"] = None
|
||||
agg["is_exempt"] = False
|
||||
agg["per_hour_contribution"] = None
|
||||
|
||||
return aggregated
|
||||
|
||||
# load() 已移除——使用 BaseDwsTask 默认实现(DATE_COL="stat_date")
|
||||
@@ -143,6 +212,9 @@ class AssistantDailyTask(BaseDwsTask):
|
||||
asl.real_use_seconds,
|
||||
asl.ledger_amount,
|
||||
asl.ledger_unit_price,
|
||||
asl.start_use_time,
|
||||
asl.last_use_time,
|
||||
asl.table_area_name,
|
||||
DATE(asl.start_use_time) AS service_date,
|
||||
COALESCE(ex.is_trash, 0) AS is_trash
|
||||
FROM dwd.dwd_assistant_service_log asl
|
||||
@@ -281,6 +353,131 @@ class AssistantDailyTask(BaseDwsTask):
|
||||
|
||||
return result
|
||||
|
||||
# ==========================================================================
|
||||
# 定档折算惩罚 — 纯函数(静态方法,不依赖数据库)
|
||||
# ==========================================================================
|
||||
|
||||
    @staticmethod
    def detect_overlap_violations(
        service_records: List[Dict[str, Any]],
        penalty_areas: Set[str],
    ) -> Dict[Tuple[int, date], List[Dict[str, Any]]]:
        """
        Detect violations where more than 2 assistants are attached to the
        same table over the same time span.

        Input:
            service_records: list of service records, each containing
                assistant_id, table_id, table_area, start_time, end_time,
                service_date
            penalty_areas: set of area codes to inspect (e.g. PENALTY_AREAS)

        Output:
            {(assistant_id, service_date): [violation_info, ...]}
            where violation_info carries table_id, overlap_count,
            assistant_ids, service_date

        Algorithm:
            1. Keep only records inside a penalty area
            2. Group by (table_id, service_date)
            3. Sweep-line each group to find the peak number of distinct
               assistants simultaneously attached
            4. When the peak exceeds 2, flag every assistant present at a
               peak moment as violating
        """
        # Filter: keep only penalty-area records with complete time info.
        filtered = []
        for r in service_records:
            area = r.get("table_area", "")
            if area not in penalty_areas:
                continue
            if r.get("start_time") is None or r.get("end_time") is None:
                continue
            filtered.append(r)

        # Group by (table_id, service_date).
        groups: Dict[Tuple[int, date], List[Dict[str, Any]]] = defaultdict(list)
        for r in filtered:
            key = (r["table_id"], r["service_date"])
            groups[key].append(r)

        violations: Dict[Tuple[int, date], List[Dict[str, Any]]] = defaultdict(list)

        for (table_id, svc_date), records in groups.items():
            if len(records) <= 2:
                # At most 2 intervals -> at most 2 distinct assistants;
                # cannot exceed the threshold, so skip the sweep.
                continue

            # Sweep-line: collect start/end events to find the peak.
            events: List[Tuple[Any, int, int]] = []  # (time, +1/-1, assistant_id)
            for r in records:
                aid = r["assistant_id"]
                events.append((r["start_time"], 1, aid))
                events.append((r["end_time"], -1, aid))

            # Sort by time; at equal timestamps process +1 (start) before
            # -1 (end), so an exact handoff still counts as an overlap.
            events.sort(key=lambda e: (e[0], -e[1]))

            # Sweep: track the set of currently-attached assistants.
            # Per-assistant counters handle one assistant holding several
            # overlapping intervals on the same table.
            active: Dict[int, int] = defaultdict(int)  # assistant_id -> open-interval count
            max_overlap = 0
            max_overlap_aids: Set[int] = set()

            for t, delta, aid in events:
                active[aid] += delta
                if active[aid] <= 0:
                    del active[aid]

                # Peak tracking: on a new peak, reset the flagged set; on a
                # repeated peak above threshold, union in the current set so
                # every peak window's participants are captured.
                current_count = len(active)
                if current_count > max_overlap:
                    max_overlap = current_count
                    max_overlap_aids = set(active.keys())
                elif current_count == max_overlap and current_count > 2:
                    max_overlap_aids |= set(active.keys())

            if max_overlap > 2:
                # NOTE(review): this single dict is shared (not copied) across
                # every assistant's violation list — mutate with care.
                violation_info = {
                    "table_id": table_id,
                    "service_date": svc_date,
                    "overlap_count": max_overlap,
                    "assistant_ids": max_overlap_aids,
                }
                # Record the violation against each involved assistant.
                for aid in max_overlap_aids:
                    violations[(aid, svc_date)].append(violation_info)

        return dict(violations)
|
||||
|
||||
@staticmethod
|
||||
def compute_penalty_minutes(
|
||||
actual_minutes: Decimal,
|
||||
per_hour_contribution: Decimal,
|
||||
threshold: Decimal = Decimal("24"),
|
||||
) -> Decimal:
|
||||
"""
|
||||
计算惩罚分钟数(纯函数)。
|
||||
|
||||
规则:
|
||||
- per_hour_contribution >= threshold → 0(满额计入)
|
||||
- per_hour_contribution < threshold →
|
||||
actual_minutes × (1 - per_hour_contribution / threshold)
|
||||
- per_hour_contribution < 0 → 视为 0(防御性编程)
|
||||
|
||||
结果范围:[0, actual_minutes]
|
||||
"""
|
||||
if actual_minutes <= 0:
|
||||
return Decimal("0")
|
||||
|
||||
# 防御性:负值视为 0
|
||||
phc = max(per_hour_contribution, Decimal("0"))
|
||||
|
||||
if phc >= threshold:
|
||||
return Decimal("0")
|
||||
|
||||
# penalty = actual_minutes × (1 - phc / threshold)
|
||||
ratio = Decimal("1") - phc / threshold
|
||||
penalty = actual_minutes * ratio
|
||||
|
||||
# 确保结果在 [0, actual_minutes] 范围内
|
||||
penalty = max(Decimal("0"), min(penalty, actual_minutes))
|
||||
return penalty
|
||||
|
||||
|
||||
# 便于外部导入
|
||||
__all__ = ['AssistantDailyTask']
|
||||
__all__ = ['AssistantDailyTask', 'PENALTY_AREAS']
|
||||
|
||||
@@ -0,0 +1,542 @@
|
||||
# -*- coding: utf-8 -*-
"""
Assistant order-revenue four-statistic task.

Purpose:
    At (assistant + date) granularity, compute each assistant's daily order
    revenue contribution:
    - order_gross_revenue: total order revenue (table fee + drinks/food +
      all assistant service fees)
    - order_net_revenue: net order revenue (gross minus all assistants'
      service commissions)
    - time_weighted_revenue: time-weighted contribution (personal share
      prorated by service duration)
    - time_weighted_net_revenue: time-weighted net contribution
      (time-weighted revenue minus the individual's commission)

Data sources:
    - dwd_settlement_head: settlement head table
    - dwd_table_fee_log: table-fee detail
    - dwd_assistant_service_log: assistant service records

Target table:
    dws.dws_assistant_order_contribution

Update strategy:
    - Idempotent: delete-before-insert over the date window

Core algorithm:
    Time-weighted revenue is computed as:
    1. Effective billable duration per table =
       MAX(total assistant service duration, table usage duration)
    2. Table-fee share = table_fee x (personal duration / effective duration)
    3. The individual's own service fee is counted directly
    4. Drinks/food are split pro rata by assistants' total durations

    Bonus/tip courses (course_type=BONUS) do not participate in order-level
    proration; all four statistics fall back to the assistant's own service
    revenue and commission.

Author: ETL team
Created: 2026-02-24
"""

from __future__ import annotations

from dataclasses import dataclass, field
from datetime import date
from decimal import Decimal
from typing import Any, Dict, List

from .base_dws_task import BaseDwsTask, TaskContext
|
||||
|
||||
|
||||
# =============================================================================
|
||||
# 数据结构
|
||||
# =============================================================================
|
||||
|
||||
@dataclass
class TableUsage:
    """Table usage information for one table within one order."""
    table_id: int
    table_area: str      # area code/name (A/B/C/S/TV/M1-M7, etc.)
    usage_seconds: int   # table usage duration in seconds
    table_fee: Decimal   # table/room fee
|
||||
|
||||
|
||||
@dataclass
class AssistantService:
    """One assistant service record within an order."""
    assistant_id: int
    table_id: int
    service_seconds: int     # service duration in seconds
    ledger_amount: Decimal   # service revenue (what the assistant billed)
    commission: Decimal      # assistant's commission (real_service_money)
    skill_id: int
    course_type: str         # BASE / BONUS / ROOM
    nickname: str = ""       # assistant nickname (for output rows)
|
||||
|
||||
|
||||
@dataclass
class OrderData:
    """Aggregated data for one settlement order (the complete order view)."""
    order_settle_id: int
    site_id: int
    total_table_fee: Decimal      # total table fees
    total_goods_amount: Decimal   # total drinks/food amount
    tables: List[TableUsage] = field(default_factory=list)
    assistants: List[AssistantService] = field(default_factory=list)
    stat_date: date | None = None  # order date (date part of pay_time)
|
||||
|
||||
|
||||
# =============================================================================
|
||||
# 助教订单流水统计任务
|
||||
# =============================================================================
|
||||
|
||||
class AssistantOrderContributionTask(BaseDwsTask):
|
||||
"""
|
||||
助教订单流水四项统计任务
|
||||
|
||||
粒度:(site_id, assistant_id, stat_date)
|
||||
策略:delete-before-insert 幂等更新
|
||||
"""
|
||||
|
||||
DATE_COL = "stat_date"
|
||||
|
||||
    def get_task_code(self) -> str:
        """Registry code for this task."""
        return "DWS_ASSISTANT_ORDER_CONTRIBUTION"

    def get_target_table(self) -> str:
        """Target table name (within the dws schema)."""
        return "dws_assistant_order_contribution"

    def get_primary_keys(self) -> List[str]:
        """Logical primary key: one row per (site, assistant, day)."""
        return ["site_id", "assistant_id", "stat_date"]
|
||||
|
||||
# =========================================================================
|
||||
# ETL 主流程(骨架,后续任务实现)
|
||||
# =========================================================================
|
||||
|
||||
    def extract(self, context: TaskContext) -> Dict[str, Any]:
        """Extract settlement, table-fee and assistant-service data from the
        DWD layer and aggregate it into per-order ``OrderData`` objects.

        Returns a dict with keys ``orders``, ``start_date``, ``end_date``
        and ``site_id`` for ``transform`` to consume.
        """
        # context.window_start/window_end may be datetime or date; normalize
        # to date via the hasattr probe.
        start_date = context.window_start.date() if hasattr(context.window_start, 'date') else context.window_start
        end_date = context.window_end.date() if hasattr(context.window_end, 'date') else context.window_end
        site_id = context.store_id

        self.logger.info(
            "%s: 提取数据,日期范围 %s ~ %s",
            self.get_task_code(), start_date, end_date
        )

        # 1. Settlement heads for table-checkout orders (settle_type=1).
        settlements = self._extract_settlements(site_id, start_date, end_date)

        # 2. Table-fee detail rows.
        table_fees = self._extract_table_fees(site_id, start_date, end_date)

        # 3. Assistant service records (with course-type mapping).
        service_logs = self._extract_service_logs(site_id, start_date, end_date)

        # 4. Aggregate everything into OrderData keyed by order_settle_id.
        orders = self._aggregate_to_orders(settlements, table_fees, service_logs)

        self.logger.info(
            "%s: 提取完成,结算单 %d 条,聚合订单 %d 条",
            self.get_task_code(), len(settlements), len(orders)
        )

        return {
            'orders': orders,
            'start_date': start_date,
            'end_date': end_date,
            'site_id': site_id,
        }
|
||||
|
||||
    def transform(self, extracted: Dict[str, Any], context: TaskContext) -> List[Dict[str, Any]]:
        """Compute the four contribution statistics per order and aggregate
        them into daily rows keyed by (assistant_id, stat_date).

        Relies on ``self.compute_assistant_contribution(order, aid)``
        (defined elsewhere in this class) returning a dict with the four
        revenue keys accumulated below.
        """
        orders: List[OrderData] = extracted['orders']
        site_id = extracted['site_id']

        self.logger.info(
            "%s: 转换数据,订单 %d 条",
            self.get_task_code(), len(orders)
        )

        # Accumulator keyed by (assistant_id, stat_date).
        agg: Dict[tuple, Dict[str, Any]] = {}

        for order in orders:
            # Skip orders with no assistant service at all.
            if not order.assistants:
                continue

            # Order date (derived from the settlement head's pay_time and
            # stored on the OrderData).
            stat_date = getattr(order, 'stat_date', None)
            if stat_date is None:
                continue

            # All distinct assistants participating in this order.
            assistant_ids = set(a.assistant_id for a in order.assistants)

            for aid in assistant_ids:
                contribution = self.compute_assistant_contribution(order, aid)
                key = (aid, stat_date)

                if key not in agg:
                    # Nickname from the first matching service record; since
                    # aid comes from order.assistants, the None default is
                    # effectively unreachable.
                    nickname = next(
                        (a.nickname for a in order.assistants if a.assistant_id == aid),
                        None
                    )
                    agg[key] = {
                        'site_id': site_id,
                        # NOTE(review): tenant_id is filled with order.site_id
                        # even though _extract_settlements selects a real
                        # tenant_id column — confirm they are truly identical.
                        'tenant_id': order.site_id,
                        'assistant_id': aid,
                        'assistant_nickname': nickname,
                        'stat_date': stat_date,
                        'order_gross_revenue': Decimal('0'),
                        'order_net_revenue': Decimal('0'),
                        'time_weighted_revenue': Decimal('0'),
                        'time_weighted_net_revenue': Decimal('0'),
                        'order_count': 0,
                        'total_service_seconds': 0,
                    }

                rec = agg[key]
                rec['order_gross_revenue'] += contribution['order_gross_revenue']
                rec['order_net_revenue'] += contribution['order_net_revenue']
                rec['time_weighted_revenue'] += contribution['time_weighted_revenue']
                rec['time_weighted_net_revenue'] += contribution['time_weighted_net_revenue']
                rec['order_count'] += 1
                # Accumulate this assistant's total service seconds in the order.
                rec['total_service_seconds'] += sum(
                    a.service_seconds for a in order.assistants if a.assistant_id == aid
                )

        result = list(agg.values())
        self.logger.info(
            "%s: 转换完成,输出 %d 条助教日度统计",
            self.get_task_code(), len(result)
        )
        return result
|
||||
|
||||
# load() 使用 BaseDwsTask 默认实现(DATE_COL="stat_date")
|
||||
|
||||
# =========================================================================
|
||||
# 数据提取方法
|
||||
# =========================================================================
|
||||
|
||||
    def _extract_settlements(
        self, site_id: int, start_date: date, end_date: date
    ) -> List[Dict[str, Any]]:
        """Extract settlement heads for table-checkout orders.

        settle_type=1 marks table checkout; rows carry table-fee and
        drinks/food amounts plus the order date (DATE(pay_time)).
        Returns a list of plain dicts, empty when nothing matches.
        """
        sql = """
            SELECT
                order_settle_id,
                site_id,
                tenant_id,
                table_charge_money,
                goods_money,
                DATE(pay_time) AS stat_date
            FROM dwd.dwd_settlement_head
            WHERE site_id = %s
              AND settle_type = 1
              AND DATE(pay_time) >= %s
              AND DATE(pay_time) <= %s
        """
        rows = self.db.query(sql, (site_id, start_date, end_date))
        return [dict(row) for row in rows] if rows else []
|
||||
|
||||
    def _extract_table_fees(
        self, site_id: int, start_date: date, end_date: date
    ) -> List[Dict[str, Any]]:
        """Extract table-fee detail rows.

        Each row is one table within one order; real_table_use_seconds is
        the table's actual usage duration. Soft-deleted rows
        (is_delete != 0) are excluded. Returns plain dicts, empty on no match.
        """
        sql = """
            SELECT
                tfl.order_settle_id,
                tfl.site_table_id AS table_id,
                COALESCE(tfl.site_table_area_name, '') AS table_area,
                COALESCE(tfl.real_table_use_seconds, 0) AS usage_seconds,
                COALESCE(tfl.ledger_amount, 0) AS table_fee
            FROM dwd.dwd_table_fee_log tfl
            WHERE tfl.site_id = %s
              AND DATE(tfl.start_use_time) >= %s
              AND DATE(tfl.start_use_time) <= %s
              AND COALESCE(tfl.is_delete, 0) = 0
        """
        rows = self.db.query(sql, (site_id, start_date, end_date))
        return [dict(row) for row in rows] if rows else []
|
||||
|
||||
def _extract_service_logs(
|
||||
self, site_id: int, start_date: date, end_date: date
|
||||
) -> List[Dict[str, Any]]:
|
||||
"""提取助教服务记录(含课程类型映射)
|
||||
|
||||
通过 LEFT JOIN cfg_skill_type 获取 course_type_code,
|
||||
real_service_money 为助教分成。
|
||||
"""
|
||||
sql = """
|
||||
SELECT
|
||||
asl.order_settle_id,
|
||||
asl.site_assistant_id AS assistant_id,
|
||||
asl.nickname,
|
||||
asl.site_table_id AS table_id,
|
||||
COALESCE(asl.income_seconds, 0) AS service_seconds,
|
||||
COALESCE(asl.ledger_amount, 0) AS ledger_amount,
|
||||
COALESCE(asl.real_service_money, 0) AS commission,
|
||||
COALESCE(asl.skill_id, 0) AS skill_id,
|
||||
COALESCE(cst.course_type_code, 'BASE') AS course_type
|
||||
FROM dwd.dwd_assistant_service_log asl
|
||||
LEFT JOIN dws.cfg_skill_type cst
|
||||
ON asl.skill_id = cst.skill_id
|
||||
AND cst.is_active = TRUE
|
||||
WHERE asl.site_id = %s
|
||||
AND DATE(asl.start_use_time) >= %s
|
||||
AND DATE(asl.start_use_time) <= %s
|
||||
AND COALESCE(asl.is_delete, 0) = 0
|
||||
"""
|
||||
rows = self.db.query(sql, (site_id, start_date, end_date))
|
||||
return [dict(row) for row in rows] if rows else []
|
||||
|
||||
def _aggregate_to_orders(
|
||||
self,
|
||||
settlements: List[Dict[str, Any]],
|
||||
table_fees: List[Dict[str, Any]],
|
||||
service_logs: List[Dict[str, Any]],
|
||||
) -> List[OrderData]:
|
||||
"""按 order_settle_id 聚合为 OrderData 列表
|
||||
|
||||
只保留有助教服务记录的订单(无助教的订单在 transform 中也会跳过)。
|
||||
"""
|
||||
from collections import defaultdict
|
||||
|
||||
# 按 order_settle_id 索引台费和服务记录
|
||||
table_fee_map: Dict[int, List[Dict]] = defaultdict(list)
|
||||
for tf in table_fees:
|
||||
table_fee_map[tf['order_settle_id']].append(tf)
|
||||
|
||||
service_map: Dict[int, List[Dict]] = defaultdict(list)
|
||||
for sl in service_logs:
|
||||
service_map[sl['order_settle_id']].append(sl)
|
||||
|
||||
orders: List[OrderData] = []
|
||||
for settle in settlements:
|
||||
oid = settle['order_settle_id']
|
||||
svc_list = service_map.get(oid)
|
||||
# 跳过无助教服务的订单
|
||||
if not svc_list:
|
||||
continue
|
||||
|
||||
tables = [
|
||||
TableUsage(
|
||||
table_id=int(tf['table_id']),
|
||||
table_area=tf['table_area'],
|
||||
usage_seconds=int(tf['usage_seconds']),
|
||||
table_fee=Decimal(str(tf['table_fee'])),
|
||||
)
|
||||
for tf in table_fee_map.get(oid, [])
|
||||
]
|
||||
|
||||
assistants = [
|
||||
AssistantService(
|
||||
assistant_id=int(sl['assistant_id']),
|
||||
table_id=int(sl['table_id']),
|
||||
service_seconds=int(sl['service_seconds']),
|
||||
ledger_amount=Decimal(str(sl['ledger_amount'])),
|
||||
commission=Decimal(str(sl['commission'])),
|
||||
skill_id=int(sl['skill_id']),
|
||||
course_type=sl['course_type'],
|
||||
nickname=sl.get('nickname', ''),
|
||||
)
|
||||
for sl in svc_list
|
||||
]
|
||||
|
||||
orders.append(OrderData(
|
||||
order_settle_id=int(oid),
|
||||
site_id=int(settle['site_id']),
|
||||
total_table_fee=Decimal(str(settle.get('table_charge_money') or 0)),
|
||||
total_goods_amount=Decimal(str(settle.get('goods_money') or 0)),
|
||||
tables=tables,
|
||||
assistants=assistants,
|
||||
stat_date=settle.get('stat_date'),
|
||||
))
|
||||
|
||||
return orders
|
||||
|
||||
# =========================================================================
|
||||
# 核心计算(纯函数,不依赖数据库,便于属性测试)
|
||||
# =========================================================================
|
||||
|
||||
@staticmethod
|
||||
def compute_order_gross_revenue(order: OrderData) -> Decimal:
|
||||
"""订单总流水 = 台费 + 酒水食品 + 所有助教服务费
|
||||
|
||||
每个参与助教获得相同的 order_gross_revenue 值。
|
||||
"""
|
||||
total_service_amount = sum(
|
||||
(a.ledger_amount for a in order.assistants), Decimal('0')
|
||||
)
|
||||
return order.total_table_fee + order.total_goods_amount + total_service_amount
|
||||
|
||||
@staticmethod
|
||||
def compute_order_net_revenue(order: OrderData) -> Decimal:
|
||||
"""订单净流水 = 订单总流水 - 所有助教服务分成
|
||||
|
||||
每个参与助教获得相同的 order_net_revenue 值。
|
||||
"""
|
||||
gross = AssistantOrderContributionTask.compute_order_gross_revenue(order)
|
||||
total_commission = sum(
|
||||
(a.commission for a in order.assistants), Decimal('0')
|
||||
)
|
||||
return gross - total_commission
|
||||
|
||||
@staticmethod
|
||||
def compute_time_weighted_revenue(
|
||||
order: OrderData, assistant_id: int
|
||||
) -> Decimal:
|
||||
"""时效贡献流水 = 台费按时长分摊 + 个人服务费 + 酒水食品按时长比例
|
||||
|
||||
算法步骤:
|
||||
1. 每张台桌:billable_seconds = MAX(助教总服务时长, 台桌使用时长)
|
||||
台费分摊 = table_fee × (个人服务时长 / billable_seconds)
|
||||
2. 个人服务费(ledger_amount)直接计入
|
||||
3. 酒水食品按个人总服务时长占所有助教总服务时长的比例均分
|
||||
|
||||
超休/打赏课(BONUS):四项统计均设为个人服务流水和分成,
|
||||
不参与订单级分摊。此逻辑在调用方处理,本方法仅处理常规情况。
|
||||
|
||||
边界情况:
|
||||
- 台桌使用时长为 0 且助教总服务时长也为 0:台费分摊 = 0
|
||||
- 助教总服务时长为 0:酒水食品分摊 = 0
|
||||
"""
|
||||
# --- 筛选该助教的服务记录(排除 BONUS 类型) ---
|
||||
my_services = [
|
||||
a for a in order.assistants
|
||||
if a.assistant_id == assistant_id and a.course_type != "BONUS"
|
||||
]
|
||||
all_non_bonus = [a for a in order.assistants if a.course_type != "BONUS"]
|
||||
|
||||
# 如果该助教无非 BONUS 服务记录,返回 0
|
||||
if not my_services:
|
||||
return Decimal('0')
|
||||
|
||||
# --- 步骤 1:台费按时长分摊 ---
|
||||
table_fee_share = Decimal('0')
|
||||
for table in order.tables:
|
||||
# 该台桌上所有助教的服务时长之和
|
||||
table_total_svc = sum(
|
||||
a.service_seconds for a in all_non_bonus
|
||||
if a.table_id == table.table_id
|
||||
)
|
||||
# 该助教在该台桌的服务时长
|
||||
my_table_svc = sum(
|
||||
a.service_seconds for a in my_services
|
||||
if a.table_id == table.table_id
|
||||
)
|
||||
if my_table_svc == 0:
|
||||
continue
|
||||
|
||||
# 有效计费时长 = MAX(助教总服务时长, 台桌使用时长)
|
||||
billable_seconds = max(table_total_svc, table.usage_seconds)
|
||||
if billable_seconds <= 0:
|
||||
continue
|
||||
|
||||
table_fee_share += table.table_fee * Decimal(my_table_svc) / Decimal(billable_seconds)
|
||||
|
||||
# --- 步骤 2:个人服务费直接计入 ---
|
||||
personal_service = sum(
|
||||
(a.ledger_amount for a in my_services), Decimal('0')
|
||||
)
|
||||
|
||||
# --- 步骤 3:酒水食品按总时长比例均分 ---
|
||||
my_total_seconds = sum(a.service_seconds for a in my_services)
|
||||
all_total_seconds = sum(a.service_seconds for a in all_non_bonus)
|
||||
|
||||
if all_total_seconds > 0 and my_total_seconds > 0:
|
||||
goods_share = order.total_goods_amount * Decimal(my_total_seconds) / Decimal(all_total_seconds)
|
||||
else:
|
||||
goods_share = Decimal('0')
|
||||
|
||||
return table_fee_share + personal_service + goods_share
|
||||
|
||||
@staticmethod
|
||||
def compute_time_weighted_net_revenue(
|
||||
time_weighted_revenue: Decimal, assistant_commission: Decimal
|
||||
) -> Decimal:
|
||||
"""时效净贡献 = 时效贡献流水 - 个人服务分成"""
|
||||
return time_weighted_revenue - assistant_commission
|
||||
|
||||
@staticmethod
|
||||
def compute_assistant_contribution(
|
||||
order: OrderData, assistant_id: int
|
||||
) -> Dict[str, Decimal]:
|
||||
"""计算单个助教在单个订单中的四项统计(含 BONUS 特殊处理)
|
||||
|
||||
返回字典包含:
|
||||
- order_gross_revenue
|
||||
- order_net_revenue
|
||||
- time_weighted_revenue
|
||||
- time_weighted_net_revenue
|
||||
- total_commission(该助教个人分成,辅助字段)
|
||||
|
||||
超休/打赏课(BONUS):四项统计均设为个人服务流水和分成,
|
||||
不参与订单级分摊。
|
||||
"""
|
||||
cls = AssistantOrderContributionTask
|
||||
|
||||
# 该助教的所有服务记录
|
||||
my_services = [a for a in order.assistants if a.assistant_id == assistant_id]
|
||||
if not my_services:
|
||||
return {
|
||||
'order_gross_revenue': Decimal('0'),
|
||||
'order_net_revenue': Decimal('0'),
|
||||
'time_weighted_revenue': Decimal('0'),
|
||||
'time_weighted_net_revenue': Decimal('0'),
|
||||
'total_commission': Decimal('0'),
|
||||
}
|
||||
|
||||
# 分离 BONUS 和非 BONUS 服务
|
||||
bonus_services = [a for a in my_services if a.course_type == "BONUS"]
|
||||
normal_services = [a for a in my_services if a.course_type != "BONUS"]
|
||||
|
||||
# BONUS 部分:直接用个人流水
|
||||
bonus_revenue = sum((a.ledger_amount for a in bonus_services), Decimal('0'))
|
||||
bonus_commission = sum((a.commission for a in bonus_services), Decimal('0'))
|
||||
|
||||
if normal_services:
|
||||
# 有常规服务:按正常逻辑计算
|
||||
normal_commission = sum((a.commission for a in normal_services), Decimal('0'))
|
||||
total_commission = normal_commission + bonus_commission
|
||||
gross = cls.compute_order_gross_revenue(order)
|
||||
net = cls.compute_order_net_revenue(order)
|
||||
twr = cls.compute_time_weighted_revenue(order, assistant_id)
|
||||
|
||||
# 合成最终值(先算 time_weighted_revenue 再减 total_commission,
|
||||
# 保证 twnr == twr_final - total_commission 精度一致)
|
||||
twr_final = twr + bonus_revenue
|
||||
twnr_final = twr_final - total_commission
|
||||
|
||||
return {
|
||||
'order_gross_revenue': gross + bonus_revenue,
|
||||
'order_net_revenue': net + (bonus_revenue - bonus_commission),
|
||||
'time_weighted_revenue': twr_final,
|
||||
'time_weighted_net_revenue': twnr_final,
|
||||
'total_commission': total_commission,
|
||||
}
|
||||
else:
|
||||
# 纯 BONUS 助教:四项统计均为个人流水
|
||||
return {
|
||||
'order_gross_revenue': bonus_revenue,
|
||||
'order_net_revenue': bonus_revenue - bonus_commission,
|
||||
'time_weighted_revenue': bonus_revenue,
|
||||
'time_weighted_net_revenue': bonus_revenue - bonus_commission,
|
||||
'total_commission': bonus_commission,
|
||||
}
|
||||
|
||||
|
||||
# 便于外部导入
|
||||
__all__ = [
|
||||
'TableUsage',
|
||||
'AssistantService',
|
||||
'OrderData',
|
||||
'AssistantOrderContributionTask',
|
||||
]
|
||||
@@ -85,10 +85,14 @@ class MemberConsumptionTask(BaseDwsTask):
|
||||
# 3. 获取会员卡余额
|
||||
card_balances = self._extract_card_balances(site_id)
|
||||
|
||||
# CHANGE 2025-07-15 | task 4.1: 获取充值统计(30/60/90 天窗口)
|
||||
recharge_stats = self._extract_recharge_stats(site_id, stat_date)
|
||||
|
||||
return {
|
||||
'consumption_stats': consumption_stats,
|
||||
'member_info': member_info,
|
||||
'card_balances': card_balances,
|
||||
'recharge_stats': recharge_stats,
|
||||
'stat_date': stat_date,
|
||||
'site_id': site_id
|
||||
}
|
||||
@@ -100,6 +104,7 @@ class MemberConsumptionTask(BaseDwsTask):
|
||||
consumption_stats = extracted['consumption_stats']
|
||||
member_info = extracted['member_info']
|
||||
card_balances = extracted['card_balances']
|
||||
recharge_stats = extracted.get('recharge_stats', {})
|
||||
stat_date = extracted['stat_date']
|
||||
site_id = extracted['site_id']
|
||||
|
||||
@@ -119,11 +124,20 @@ class MemberConsumptionTask(BaseDwsTask):
|
||||
|
||||
memb_info = member_info.get(member_id, {})
|
||||
balance = card_balances.get(member_id, {})
|
||||
# CHANGE 2025-07-15 | task 4.2: 合并充值统计,无记录时默认 0
|
||||
recharge = recharge_stats.get(member_id, {})
|
||||
|
||||
# 计算活跃度和客户分层
|
||||
days_since_last = self._calc_days_since(stat_date, stats.get('last_consume_date'))
|
||||
customer_tier = self._calculate_customer_tier(stats, days_since_last)
|
||||
|
||||
# CHANGE 2025-07-15 | task 4.2: 次均消费 = total_consume_amount / MAX(total_visit_count, 1)
|
||||
total_consume_amount = self.safe_decimal(stats.get('total_consume_amount', 0))
|
||||
total_visit_count = self.safe_int(stats.get('total_visit_count', 0))
|
||||
avg_ticket_amount = (
|
||||
total_consume_amount / max(total_visit_count, 1)
|
||||
).quantize(Decimal('0.01'))
|
||||
|
||||
record = {
|
||||
'site_id': site_id,
|
||||
'tenant_id': self.config.get("app.tenant_id", site_id),
|
||||
@@ -137,8 +151,8 @@ class MemberConsumptionTask(BaseDwsTask):
|
||||
# 全量累计统计
|
||||
'first_consume_date': stats.get('first_consume_date'),
|
||||
'last_consume_date': stats.get('last_consume_date'),
|
||||
'total_visit_count': self.safe_int(stats.get('total_visit_count', 0)),
|
||||
'total_consume_amount': self.safe_decimal(stats.get('total_consume_amount', 0)),
|
||||
'total_visit_count': total_visit_count,
|
||||
'total_consume_amount': total_consume_amount,
|
||||
'total_recharge_amount': self.safe_decimal(memb_info.get('recharge_money_sum', 0)),
|
||||
'total_table_fee': self.safe_decimal(stats.get('total_table_fee', 0)),
|
||||
'total_goods_amount': self.safe_decimal(stats.get('total_goods_amount', 0)),
|
||||
@@ -156,6 +170,15 @@ class MemberConsumptionTask(BaseDwsTask):
|
||||
'consume_amount_30d': self.safe_decimal(stats.get('consume_amount_30d', 0)),
|
||||
'consume_amount_60d': self.safe_decimal(stats.get('consume_amount_60d', 0)),
|
||||
'consume_amount_90d': self.safe_decimal(stats.get('consume_amount_90d', 0)),
|
||||
# 充值窗口统计(30/60/90 天)
|
||||
'recharge_count_30d': self.safe_int(recharge.get('count_30d', 0)),
|
||||
'recharge_count_60d': self.safe_int(recharge.get('count_60d', 0)),
|
||||
'recharge_count_90d': self.safe_int(recharge.get('count_90d', 0)),
|
||||
'recharge_amount_30d': self.safe_decimal(recharge.get('amount_30d', 0)),
|
||||
'recharge_amount_60d': self.safe_decimal(recharge.get('amount_60d', 0)),
|
||||
'recharge_amount_90d': self.safe_decimal(recharge.get('amount_90d', 0)),
|
||||
# 次均消费
|
||||
'avg_ticket_amount': avg_ticket_amount,
|
||||
# 卡余额
|
||||
'cash_card_balance': self.safe_decimal(balance.get('cash_balance', 0)),
|
||||
'gift_card_balance': self.safe_decimal(balance.get('gift_balance', 0)),
|
||||
@@ -259,13 +282,14 @@ class MemberConsumptionTask(BaseDwsTask):
|
||||
) AS birthday
|
||||
FROM dwd.dim_member m
|
||||
WHERE m.member_id IN (
|
||||
SELECT DISTINCT tenant_member_id
|
||||
SELECT DISTINCT member_id
|
||||
FROM dwd.dwd_settlement_head
|
||||
WHERE site_id = %s
|
||||
AND tenant_member_id IS NOT NULL
|
||||
AND tenant_member_id != 0
|
||||
AND member_id IS NOT NULL
|
||||
AND member_id != 0
|
||||
) AND m.scd2_is_current = 1
|
||||
"""
|
||||
# CHANGE 2026-02-24 | 修复列名:tenant_member_id → member_id(dwd_settlement_head 无 tenant_member_id 列)
|
||||
sql_fallback = """
|
||||
SELECT
|
||||
member_id,
|
||||
@@ -277,16 +301,18 @@ class MemberConsumptionTask(BaseDwsTask):
|
||||
birthday
|
||||
FROM dwd.dim_member
|
||||
WHERE member_id IN (
|
||||
SELECT DISTINCT tenant_member_id
|
||||
SELECT DISTINCT member_id
|
||||
FROM dwd.dwd_settlement_head
|
||||
WHERE site_id = %s
|
||||
AND tenant_member_id IS NOT NULL
|
||||
AND tenant_member_id != 0
|
||||
AND member_id IS NOT NULL
|
||||
AND member_id != 0
|
||||
) AND scd2_is_current = 1
|
||||
"""
|
||||
try:
|
||||
rows = self.db.query(sql_with_fdw, (site_id,))
|
||||
except Exception as exc:
|
||||
# CHANGE [2026-02-24] FDW 查询失败后事务处于 failed 状态,必须先 rollback 再执行 fallback
|
||||
self.db.rollback()
|
||||
# FDW 连接失败,降级为仅使用 dim_member.birthday
|
||||
self.logger.warning(
|
||||
"%s: FDW 读取 member_birthday_manual 失败,降级为 dim_member.birthday — %s",
|
||||
@@ -352,6 +378,55 @@ class MemberConsumptionTask(BaseDwsTask):
|
||||
|
||||
return result
|
||||
|
||||
# CHANGE 2025-07-15 | task 4.1: 新增充值统计提取方法
|
||||
def _extract_recharge_stats(
|
||||
self,
|
||||
site_id: int,
|
||||
stat_date: date,
|
||||
) -> Dict[int, Dict[str, Any]]:
|
||||
"""
|
||||
从 dwd.dwd_recharge_order 提取 30/60/90 天充值统计
|
||||
|
||||
返回: {member_id: {count_30d, count_60d, count_90d,
|
||||
amount_30d, amount_60d, amount_90d}}
|
||||
"""
|
||||
sql = """
|
||||
SELECT
|
||||
member_id,
|
||||
COUNT(CASE WHEN DATE(pay_time) >= %s - INTERVAL '29 days' THEN 1 END) AS count_30d,
|
||||
COUNT(CASE WHEN DATE(pay_time) >= %s - INTERVAL '59 days' THEN 1 END) AS count_60d,
|
||||
COUNT(CASE WHEN DATE(pay_time) >= %s - INTERVAL '89 days' THEN 1 END) AS count_90d,
|
||||
COALESCE(SUM(CASE WHEN DATE(pay_time) >= %s - INTERVAL '29 days' THEN pay_amount ELSE 0 END), 0) AS amount_30d,
|
||||
COALESCE(SUM(CASE WHEN DATE(pay_time) >= %s - INTERVAL '59 days' THEN pay_amount ELSE 0 END), 0) AS amount_60d,
|
||||
COALESCE(SUM(CASE WHEN DATE(pay_time) >= %s - INTERVAL '89 days' THEN pay_amount ELSE 0 END), 0) AS amount_90d
|
||||
FROM dwd.dwd_recharge_order
|
||||
WHERE site_id = %s
|
||||
AND member_id IS NOT NULL
|
||||
AND member_id != 0
|
||||
AND pay_time IS NOT NULL
|
||||
AND DATE(pay_time) <= %s
|
||||
GROUP BY member_id
|
||||
"""
|
||||
params = (
|
||||
stat_date, stat_date, stat_date,
|
||||
stat_date, stat_date, stat_date,
|
||||
site_id, stat_date,
|
||||
)
|
||||
rows = self.db.query(sql, params)
|
||||
|
||||
result: Dict[int, Dict[str, Any]] = {}
|
||||
for row in (rows or []):
|
||||
rd = dict(row)
|
||||
result[rd['member_id']] = {
|
||||
'count_30d': rd.get('count_30d', 0),
|
||||
'count_60d': rd.get('count_60d', 0),
|
||||
'count_90d': rd.get('count_90d', 0),
|
||||
'amount_30d': self.safe_decimal(rd.get('amount_30d', 0)),
|
||||
'amount_60d': self.safe_decimal(rd.get('amount_60d', 0)),
|
||||
'amount_90d': self.safe_decimal(rd.get('amount_90d', 0)),
|
||||
}
|
||||
return result
|
||||
|
||||
# ==========================================================================
|
||||
# 工具方法
|
||||
# ==========================================================================
|
||||
|
||||
@@ -351,6 +351,8 @@ class MemberVisitTask(BaseDwsTask):
|
||||
try:
|
||||
rows = self.db.query(sql_with_fdw, (site_id,))
|
||||
except Exception as exc:
|
||||
# CHANGE [2026-02-24] FDW 查询失败后事务处于 failed 状态,必须先 rollback 再执行 fallback
|
||||
self.db.rollback()
|
||||
# FDW 连接失败,降级为仅使用 dim_member.birthday
|
||||
self.logger.warning(
|
||||
"%s: FDW 读取 member_birthday_manual 失败,降级为 dim_member.birthday — %s",
|
||||
|
||||
@@ -161,6 +161,8 @@ class BaseOdsTask(BaseTask):
|
||||
segment_keys: set[tuple] = set()
|
||||
# CHANGE 2026-02-18 | 收集 WINDOW 模式下 API 返回数据的实际最早时间戳
|
||||
segment_earliest_time: datetime | None = None
|
||||
# CHANGE [2026-02-24] 收集 API 返回数据的实际最晚时间戳,用于 late-cutoff 保护
|
||||
segment_latest_time: datetime | None = None
|
||||
|
||||
self.logger.info(
|
||||
"%s: 开始执行(%s/%s),窗口[%s ~ %s]",
|
||||
@@ -197,6 +199,13 @@ class BaseOdsTask(BaseTask):
|
||||
if page_earliest is not None:
|
||||
if segment_earliest_time is None or page_earliest < segment_earliest_time:
|
||||
segment_earliest_time = page_earliest
|
||||
# CHANGE [2026-02-24] 收集实际最晚时间戳,用于 late-cutoff 保护
|
||||
page_latest = self._collect_latest_time(
|
||||
page_records, snapshot_time_column
|
||||
)
|
||||
if page_latest is not None:
|
||||
if segment_latest_time is None or page_latest > segment_latest_time:
|
||||
segment_latest_time = page_latest
|
||||
inserted, updated, skipped = self._insert_records_schema_aware(
|
||||
table=spec.table_name,
|
||||
records=page_records,
|
||||
@@ -229,13 +238,27 @@ class BaseOdsTask(BaseTask):
|
||||
spec.code, seg_start, segment_earliest_time,
|
||||
)
|
||||
effective_window_start = segment_earliest_time
|
||||
# CHANGE [2026-02-24] late-cutoff 保护:用 API 实际最晚时间戳收窄软删除范围
|
||||
# 防止 recent endpoint 数据保留期滚动导致窗口尾部数据消失时误标删除
|
||||
effective_window_end = seg_end
|
||||
if (
|
||||
snapshot_protect_early_cutoff
|
||||
and snapshot_mode == SnapshotMode.WINDOW
|
||||
and segment_latest_time is not None
|
||||
and segment_latest_time < seg_end
|
||||
):
|
||||
self.logger.info(
|
||||
"%s: late-cutoff 保护生效,软删除窗口终点从 %s 收窄至 %s",
|
||||
spec.code, seg_end, segment_latest_time,
|
||||
)
|
||||
effective_window_end = segment_latest_time
|
||||
deleted = self._mark_missing_as_deleted(
|
||||
table=spec.table_name,
|
||||
business_pk_cols=business_pk_cols,
|
||||
snapshot_mode=snapshot_mode,
|
||||
snapshot_time_column=snapshot_time_column,
|
||||
window_start=effective_window_start,
|
||||
window_end=seg_end,
|
||||
window_end=effective_window_end,
|
||||
key_values=segment_keys,
|
||||
allow_empty=snapshot_allow_empty,
|
||||
)
|
||||
@@ -548,7 +571,39 @@ class BaseOdsTask(BaseTask):
|
||||
except (ValueError, TypeError, OverflowError):
|
||||
continue
|
||||
return earliest
|
||||
def _collect_latest_time(
|
||||
self, records: list, time_column: str
|
||||
) -> datetime | None:
|
||||
"""从一批 API 返回记录中提取 time_column 的最大值。
|
||||
|
||||
# CHANGE [2026-02-24] Prompt=诊断 2976396053006405 is_delete 误标
|
||||
# 用于 late-cutoff 保护:当 API recent endpoint 数据保留期滚动导致
|
||||
# 窗口尾部数据消失时,避免将尾部之后的数据误标为软删除。
|
||||
"""
|
||||
if not records or not time_column:
|
||||
return None
|
||||
latest: datetime | None = None
|
||||
for rec in records:
|
||||
if not isinstance(rec, dict):
|
||||
continue
|
||||
merged = self._merge_record_layers(rec)
|
||||
raw = self._get_value_case_insensitive(merged, time_column)
|
||||
if raw is None:
|
||||
continue
|
||||
try:
|
||||
if isinstance(raw, datetime):
|
||||
ts = raw
|
||||
elif isinstance(raw, str):
|
||||
ts = dtparser.parse(raw)
|
||||
else:
|
||||
continue
|
||||
if ts.tzinfo is None:
|
||||
ts = ts.replace(tzinfo=self.tz)
|
||||
if latest is None or ts > latest:
|
||||
latest = ts
|
||||
except (ValueError, TypeError, OverflowError):
|
||||
continue
|
||||
return latest
|
||||
|
||||
def _mark_missing_as_deleted(
|
||||
self,
|
||||
@@ -995,6 +1050,13 @@ class BaseOdsTask(BaseTask):
|
||||
updated += 1
|
||||
return inserted, updated
|
||||
|
||||
# goodsStockWarningInfo 嵌套字段 → ODS 扁平列名映射
|
||||
_STOCK_WARNING_FIELD_MAP: dict[str, str] = {
|
||||
"sales_day": "warning_sales_day",
|
||||
"warning_day_max": "warning_day_max",
|
||||
"warning_day_min": "warning_day_min",
|
||||
}
|
||||
|
||||
@staticmethod
|
||||
def _merge_record_layers(record: dict) -> dict:
|
||||
merged = record
|
||||
@@ -1005,6 +1067,13 @@ class BaseOdsTask(BaseTask):
|
||||
settle_inner = merged.get("settleList")
|
||||
if isinstance(settle_inner, dict):
|
||||
merged = {**settle_inner, **merged}
|
||||
# CHANGE 2026-02-24 | 扁平化 goodsStockWarningInfo 嵌套对象,
|
||||
# 将 sales_day/warning_day_max/warning_day_min 提升为顶层键
|
||||
warning_info = merged.get("goodsStockWarningInfo")
|
||||
if isinstance(warning_info, dict):
|
||||
for src_key, dst_key in BaseOdsTask._STOCK_WARNING_FIELD_MAP.items():
|
||||
if src_key in warning_info and dst_key not in merged:
|
||||
merged[dst_key] = warning_info[src_key]
|
||||
return merged
|
||||
|
||||
@staticmethod
|
||||
|
||||
@@ -20,17 +20,24 @@ from orchestration.topological_sort import topological_sort
|
||||
@dataclass
|
||||
class _FakeMeta:
|
||||
depends_on: list[str] = field(default_factory=list)
|
||||
layer: str | None = None
|
||||
|
||||
|
||||
class _FakeRegistry:
|
||||
"""最小 Registry 替身,仅提供 get_metadata()。"""
|
||||
|
||||
def __init__(self, deps: dict[str, list[str]]):
|
||||
def __init__(self, deps: dict[str, list[str]], layers: dict[str, str] | None = None):
|
||||
self._deps = deps
|
||||
self._layers = layers or {}
|
||||
|
||||
def get_metadata(self, code: str):
|
||||
if code in self._deps:
|
||||
return _FakeMeta(depends_on=self._deps[code])
|
||||
return _FakeMeta(
|
||||
depends_on=self._deps[code],
|
||||
layer=self._layers.get(code),
|
||||
)
|
||||
if code in self._layers:
|
||||
return _FakeMeta(layer=self._layers[code])
|
||||
return _FakeMeta()
|
||||
|
||||
|
||||
@@ -140,3 +147,78 @@ class TestTopologicalSort:
|
||||
assert result.index("DWS_ASSISTANT_DAILY") < result.index("DWS_ASSISTANT_MONTHLY")
|
||||
# 所有任务在 MAINTENANCE 前
|
||||
assert result.index("DWS_MAINTENANCE") == len(result) - 1
|
||||
|
||||
# ── 跨层隐含依赖测试 ──────────────────────────────────────
|
||||
|
||||
def test_cross_layer_ods_before_dwd(self):
|
||||
"""ODS 任务应排在 DWD 任务之前(隐含层级依赖)。"""
|
||||
reg = _FakeRegistry(
|
||||
deps={"ODS_A": [], "DWD_LOAD": []},
|
||||
layers={"ODS_A": "ODS", "DWD_LOAD": "DWD"},
|
||||
)
|
||||
# 故意把 DWD 放前面
|
||||
result = topological_sort(["DWD_LOAD", "ODS_A"], reg)
|
||||
assert result.index("ODS_A") < result.index("DWD_LOAD")
|
||||
|
||||
def test_cross_layer_full_pipeline_order(self):
|
||||
"""全流程:ODS → DWD → DWS → INDEX,无论输入顺序如何。"""
|
||||
reg = _FakeRegistry(
|
||||
deps={"IDX": [], "DWS_A": [], "DWD_L": [], "ODS_X": []},
|
||||
layers={"ODS_X": "ODS", "DWD_L": "DWD", "DWS_A": "DWS", "IDX": "INDEX"},
|
||||
)
|
||||
# 故意倒序输入
|
||||
result = topological_sort(["IDX", "DWS_A", "DWD_L", "ODS_X"], reg)
|
||||
assert result.index("ODS_X") < result.index("DWD_L")
|
||||
assert result.index("DWD_L") < result.index("DWS_A")
|
||||
assert result.index("DWS_A") < result.index("IDX")
|
||||
|
||||
def test_cross_layer_multiple_tasks_per_layer(self):
|
||||
"""每层多个任务时,所有低层任务排在高层任务之前。"""
|
||||
reg = _FakeRegistry(
|
||||
deps={"ODS_A": [], "ODS_B": [], "DWD_X": [], "DWD_Y": []},
|
||||
layers={"ODS_A": "ODS", "ODS_B": "ODS", "DWD_X": "DWD", "DWD_Y": "DWD"},
|
||||
)
|
||||
result = topological_sort(["DWD_Y", "ODS_A", "DWD_X", "ODS_B"], reg)
|
||||
# 所有 ODS 在所有 DWD 之前
|
||||
for ods in ["ODS_A", "ODS_B"]:
|
||||
for dwd in ["DWD_X", "DWD_Y"]:
|
||||
assert result.index(ods) < result.index(dwd)
|
||||
|
||||
def test_cross_layer_with_explicit_deps_combined(self):
|
||||
"""隐含层级依赖 + 显式 depends_on 同时生效。"""
|
||||
reg = _FakeRegistry(
|
||||
deps={
|
||||
"ODS_A": [],
|
||||
"DWD_LOAD": [],
|
||||
"DWS_MONTHLY": ["DWS_DAILY"],
|
||||
"DWS_DAILY": [],
|
||||
},
|
||||
layers={
|
||||
"ODS_A": "ODS",
|
||||
"DWD_LOAD": "DWD",
|
||||
"DWS_DAILY": "DWS",
|
||||
"DWS_MONTHLY": "DWS",
|
||||
},
|
||||
)
|
||||
result = topological_sort(
|
||||
["DWS_MONTHLY", "DWS_DAILY", "DWD_LOAD", "ODS_A"], reg
|
||||
)
|
||||
# 层级:ODS < DWD < DWS
|
||||
assert result.index("ODS_A") < result.index("DWD_LOAD")
|
||||
assert result.index("DWD_LOAD") < result.index("DWS_DAILY")
|
||||
assert result.index("DWD_LOAD") < result.index("DWS_MONTHLY")
|
||||
# 显式依赖:DAILY < MONTHLY
|
||||
assert result.index("DWS_DAILY") < result.index("DWS_MONTHLY")
|
||||
|
||||
def test_tasks_without_layer_unaffected(self):
|
||||
"""无 layer 的任务不受层级排序影响,保持原有依赖关系。"""
|
||||
reg = _FakeRegistry(
|
||||
deps={"UTIL_A": [], "ODS_X": [], "DWD_L": []},
|
||||
layers={"ODS_X": "ODS", "DWD_L": "DWD"},
|
||||
# UTIL_A 无 layer
|
||||
)
|
||||
result = topological_sort(["UTIL_A", "DWD_L", "ODS_X"], reg)
|
||||
# ODS 仍在 DWD 之前
|
||||
assert result.index("ODS_X") < result.index("DWD_L")
|
||||
# UTIL_A 无层级约束,应在结果中
|
||||
assert "UTIL_A" in result
|
||||
|
||||
@@ -1,57 +1,142 @@
|
||||
# 小程序前端(miniprogram)
|
||||
# apps/miniprogram — 微信小程序
|
||||
|
||||
微信小程序前端项目,基于 **Donut 多端框架 + TDesign 组件库** 技术栈,为台球门店会员提供移动端服务入口。
|
||||
微信小程序前端项目,基于 Donut 多端框架 + TDesign 组件库,为台球门店会员提供移动端服务入口。
|
||||
|
||||
## 技术栈
|
||||
|
||||
- **框架**:微信小程序原生 + Donut 多端(`projectArchitecture: multiPlatform`)
|
||||
- **UI 组件**:TDesign 小程序版(`tdesign-miniprogram ^1.12.2`)
|
||||
- **语言**:TypeScript
|
||||
- **类型定义**:`miniprogram-api-typings`
|
||||
- 微信小程序原生 + Donut 多端(`projectArchitecture: multiPlatform`)
|
||||
- TDesign 小程序版(`tdesign-miniprogram ^1.12.2`)
|
||||
- TypeScript
|
||||
- 类型定义:`miniprogram-api-typings`
|
||||
|
||||
## 目录结构
|
||||
|
||||
```
|
||||
apps/miniprogram/
|
||||
├── miniprogram/ # 小程序主体代码
|
||||
│ ├── app.ts # 应用入口
|
||||
│ ├── app.json # 全局配置(页面路由、窗口、TabBar 等)
|
||||
│ ├── app.ts # 应用入口(wx.login 获取 code)
|
||||
│ ├── app.json # 全局配置(页面路由、窗口样式)
|
||||
│ ├── app.wxss # 全局样式
|
||||
│ ├── pages/ # 页面目录
|
||||
│ │ ├── mvp/ # MVP 全链路验证页
|
||||
│ │ ├── index/ # 首页
|
||||
│ │ └── logs/ # 日志页
|
||||
│ ├── utils/ # 工具函数
|
||||
│ │ ├── config.ts # 环境配置(API 地址自动切换)
|
||||
│ │ └── util.ts # 通用工具(日期格式化等)
|
||||
│ ├── miniprogram_npm/ # 构建后的 npm 包(TDesign 组件)
|
||||
│ ├── i18n/ # 国际化资源
|
||||
│ ├── miniapp/ # Donut 多端原生资源
|
||||
│ └── utils/ # 工具函数
|
||||
├── miniapp/ # 顶层 Donut 原生资源
|
||||
│ └── miniapp/ # Donut 多端原生资源
|
||||
├── typings/ # TypeScript 类型定义
|
||||
├── doc/ # 项目文档(PRD 等)
|
||||
├── i18n/ # 顶层国际化资源
|
||||
├── reports/ # 报表输出
|
||||
├── project.config.json # 微信开发者工具项目配置
|
||||
├── project.miniapp.json # Donut 多端配置
|
||||
├── tsconfig.json # TypeScript 编译配置
|
||||
├── package.json # npm 依赖声明
|
||||
└── README.md # 本文件
|
||||
└── README.md
|
||||
```
|
||||
|
||||
## 开发指南
|
||||
|
||||
1. 使用 **微信开发者工具** 打开本目录(`apps/miniprogram/`)
|
||||
2. 首次打开后,在工具中执行"构建 npm"以生成 `miniprogram_npm/`
|
||||
3. AppID:`wx7c07793d82732921`
|
||||
### 环境准备
|
||||
|
||||
1. 安装微信开发者工具
|
||||
2. 打开本目录(`apps/miniprogram/`)
|
||||
3. 首次打开后,在工具中执行"构建 npm"以生成 `miniprogram_npm/`
|
||||
4. AppID:`wx7c07793d82732921`
|
||||
|
||||
### 页面路由
|
||||
|
||||
当前注册页面(`app.json`):
|
||||
|
||||
| 路径 | 说明 |
|
||||
|------|------|
|
||||
| `pages/mvp/mvp` | MVP 全链路验证(从后端读取测试数据) |
|
||||
| `pages/index/index` | 首页(待开发) |
|
||||
| `pages/logs/logs` | 日志页(框架默认) |
|
||||
|
||||
## 后端 API 集成
|
||||
|
||||
### API 地址配置
|
||||
|
||||
`utils/config.ts` 根据小程序运行环境自动切换 API 地址:
|
||||
|
||||
| 环境 | API 地址 |
|
||||
|------|----------|
|
||||
| develop(开发版) | `http://127.0.0.1:8000` |
|
||||
| trial(体验版) | `https://api.langlangzhuoqiu.cn` |
|
||||
| release(正式版) | `https://api.langlangzhuoqiu.cn` |
|
||||
|
||||
### 认证流程
|
||||
|
||||
小程序用户的完整生命周期:
|
||||
|
||||
```
|
||||
wx.login() 获取 code
|
||||
↓
|
||||
POST /api/xcx-auth/login → 获取 JWT(受限令牌)
|
||||
↓
|
||||
POST /api/xcx-auth/apply → 提交入驻申请(球房ID + 身份 + 手机号)
|
||||
↓
|
||||
管理员在后台审批
|
||||
↓
|
||||
GET /api/xcx-auth/status → 查询审批结果
|
||||
↓
|
||||
POST /api/xcx-auth/login → 重新登录获取完整令牌(含 site_id + roles)
|
||||
↓
|
||||
正常使用业务功能
|
||||
```
|
||||
|
||||
令牌类型:
|
||||
- 受限令牌(`limited=True`):pending 用户,仅可访问申请和状态查询端点
|
||||
- 完整令牌:approved 用户,包含 `user_id` + `site_id` + `roles`
|
||||
|
||||
### 关键 API 端点
|
||||
|
||||
| 端点 | 方法 | 说明 |
|
||||
|------|------|------|
|
||||
| `/api/xcx-auth/login` | POST | 微信登录(code → JWT) |
|
||||
| `/api/xcx-auth/apply` | POST | 提交入驻申请 |
|
||||
| `/api/xcx-auth/status` | GET | 查询用户状态和申请记录 |
|
||||
| `/api/xcx-auth/sites` | GET | 获取关联门店列表 |
|
||||
| `/api/xcx-auth/switch-site` | POST | 切换当前门店 |
|
||||
| `/api/xcx-auth/refresh` | POST | 刷新令牌 |
|
||||
| `/api/xcx-test` | GET | MVP 全链路验证 |
|
||||
|
||||
> 完整 API 文档见 [`apps/backend/docs/API-REFERENCE.md`](../backend/docs/API-REFERENCE.md)
|
||||
|
||||
## MVP 页面
|
||||
|
||||
`pages/mvp/mvp` 是全链路验证页面,从后端 `/api/xcx-test` 读取 `test."xcx-test"` 表数据并显示,用于验证:
|
||||
- 小程序 → 后端 API → 数据库 的完整链路
|
||||
- 网络请求、错误处理、加载状态
|
||||
|
||||
## 权限模型
|
||||
|
||||
小程序用户通过 RBAC 模型控制功能访问:
|
||||
|
||||
| 角色 | 可见功能 |
|
||||
|------|----------|
|
||||
| coach(助教) | 查看任务、助教看板 |
|
||||
| staff(员工) | 查看任务、数据看板 |
|
||||
| site_admin(店铺管理员) | 全部看板 |
|
||||
| tenant_admin(租户管理员) | 全部权限 |
|
||||
|
||||
多门店支持:用户可关联多个门店,通过 `/api/xcx-auth/switch-site` 切换。
|
||||
|
||||
## 与 Monorepo 的关系
|
||||
|
||||
- 本项目为独立前端工程,不参与 Python uv workspace
|
||||
- H5 原型设计稿位于 `docs/h5_ui/`(从原 `Prototype/` 目录迁移)
|
||||
- 未来将通过 FastAPI 后端(`apps/backend/`)与 ETL 数据层交互
|
||||
- 通过 FastAPI 后端(`apps/backend/`)与数据层交互
|
||||
- H5 原型设计稿位于 `docs/h5_ui/`
|
||||
- 认证数据存储在 `zqyy_app` 数据库的 `auth` Schema
|
||||
|
||||
## Roadmap
|
||||
|
||||
- [ ] 接入 FastAPI 后端 API,替换当前静态/模拟数据
|
||||
- [ ] TDesign 组件库升级至最新版本
|
||||
- [ ] 完善页面路由与业务功能(会员中心、助教预约、订单查询等)
|
||||
- [ ] 集成 CI/CD(代码检查、自动上传体验版)
|
||||
- [ ] 多门店支持(基于 `site_id` 的数据隔离)
|
||||
- [ ] 完善认证流程页面(登录 → 申请 → 等待审批 → 首页)
|
||||
- [ ] 数据看板页面(助教业绩、客户分析)
|
||||
- [ ] 会员中心页面
|
||||
- [ ] 助教预约功能
|
||||
- [ ] 订单查询功能
|
||||
- [ ] 多门店切换 UI
|
||||
- [ ] 消息通知(微信订阅消息)
|
||||
- [ ] CI/CD(代码检查、自动上传体验版)
|
||||
|
||||
21
db/README.md
21
db/README.md
@@ -1,9 +1,10 @@
|
||||
# db/ — 数据库资产目录
|
||||
|
||||
## 当前状态(2026-02-22 基线重置后)
|
||||
## 当前状态(2026-02-25 更新)
|
||||
|
||||
完整 DDL 基线已迁移至 `docs/database/ddl/`(按 schema 分文件,从测试库自动导出)。
|
||||
本目录保留运行时资产(种子数据、FDW 配置、建库脚本),历史文件已归档。
|
||||
本目录保留运行时资产(迁移脚本、种子数据、FDW 配置、建库脚本)。
|
||||
2026-02-22 基线重置前的旧迁移已归档至 `_archived/`;之后的新迁移仍在 `migrations/` 中。
|
||||
|
||||
## 目录结构
|
||||
|
||||
@@ -11,7 +12,7 @@
|
||||
db/
|
||||
├── etl_feiqiu/
|
||||
│ ├── schemas/ — 已清空(DDL 基线见 docs/database/ddl/etl_feiqiu__*.sql)
|
||||
│ ├── migrations/ — 已归档(全部变更已吸收进新 DDL 基线)
|
||||
│ ├── migrations/ — 基线重置后的增量迁移(重置前的已归档)
|
||||
│ ├── seeds/ — 种子数据(运行时需要)
|
||||
│ │ ├── seed_ods_tasks.sql
|
||||
│ │ ├── seed_scheduler_tasks.sql
|
||||
@@ -21,7 +22,7 @@ db/
|
||||
│ └── create_test_db.sql
|
||||
├── zqyy_app/
|
||||
│ ├── schemas/ — 已清空
|
||||
│ ├── migrations/ — 已归档
|
||||
│ ├── migrations/ — 基线重置后的增量迁移(含 auth 建表、FDW 配置、种子数据)
|
||||
│ ├── seeds/
|
||||
│ │ └── admin_web_seed.sql
|
||||
│ └── scripts/
|
||||
@@ -31,7 +32,7 @@ db/
|
||||
│ ├── setup_fdw_test.sql
|
||||
│ ├── setup_fdw_reverse.sql
|
||||
│ └── setup_fdw_reverse_test.sql
|
||||
└── _archived/ — 归档(旧 DDL + 迁移脚本,仅供历史参考)
|
||||
└── _archived/ — 归档(旧 DDL + 基线重置前的迁移脚本,仅供历史参考)
|
||||
└── ddl_baseline_2026-02-22/
|
||||
```
|
||||
|
||||
@@ -45,11 +46,13 @@ db/
|
||||
- `docs/database/ddl/etl_feiqiu__dws.sql`
|
||||
- `docs/database/ddl/etl_feiqiu__app.sql`(仅视图)
|
||||
- `docs/database/ddl/zqyy_app__public.sql`
|
||||
- `docs/database/ddl/fdw.sql`
|
||||
- `docs/database/ddl/zqyy_app__auth.sql`
|
||||
- `docs/database/ddl/fdw.sql`(仅正向映射;反向映射见 `db/fdw/setup_fdw_reverse*.sql`)
|
||||
|
||||
重新生成:`python scripts/ops/gen_consolidated_ddl.py`
|
||||
|
||||
## 未来迁移
|
||||
## 迁移管理
|
||||
|
||||
归档后,新的迁移脚本仍放 `migrations/`,文件名格式 `YYYY-MM-DD__描述.sql`。
|
||||
每次迁移执行后,建议重新运行 DDL 生成脚本刷新基线。
|
||||
新的迁移脚本放 `migrations/`,文件名格式 `YYYY-MM-DD__描述.sql`。
|
||||
每次迁移执行后,建议重新运行 DDL 生成脚本刷新基线:`python scripts/ops/gen_consolidated_ddl.py`
|
||||
种子数据类脚本(纯 INSERT/DELETE)放 `seeds/`,不放 `migrations/`。
|
||||
|
||||
@@ -845,6 +845,9 @@ CREATE TABLE IF NOT EXISTS dim_store_goods_ex (
|
||||
sort_order INTEGER,
|
||||
batch_stock_quantity NUMERIC(18,2), -- CHANGE 2026-02-21 | 修正类型:与 DB 实际一致(迁移脚本用 NUMERIC(18,2))
|
||||
time_slot_sale INTEGER, -- CHANGE 2026-02-21 | 新增:分时段销售标记
|
||||
warning_sales_day NUMERIC(18,2), -- CHANGE 2026-02-24 | 新增:库存预警日均销量
|
||||
warning_day_max INTEGER, -- CHANGE 2026-02-24 | 新增:预警天数上限
|
||||
warning_day_min INTEGER, -- CHANGE 2026-02-24 | 新增:预警天数下限
|
||||
SCD2_start_time TIMESTAMPTZ,
|
||||
SCD2_end_time TIMESTAMPTZ,
|
||||
SCD2_is_current INT,
|
||||
|
||||
@@ -1744,6 +1744,9 @@ CREATE TABLE IF NOT EXISTS ods.store_goods_master (
|
||||
commodity_code TEXT,
|
||||
not_sale INTEGER,
|
||||
time_slot_sale INTEGER, -- CHANGE 2026-02-21 | 新增:分时段销售标记(API 返回但此前未收录)
|
||||
warning_sales_day NUMERIC(18,2), -- CHANGE 2026-02-24 | 新增:库存预警日均销量(goodsStockWarningInfo.sales_day)
|
||||
warning_day_max INTEGER, -- CHANGE 2026-02-24 | 新增:预警天数上限(goodsStockWarningInfo.warning_day_max)
|
||||
warning_day_min INTEGER, -- CHANGE 2026-02-24 | 新增:预警天数下限(goodsStockWarningInfo.warning_day_min)
|
||||
payload JSONB NOT NULL,
|
||||
content_hash TEXT NOT NULL,
|
||||
source_file TEXT,
|
||||
|
||||
@@ -0,0 +1,139 @@
|
||||
-- =============================================================================
|
||||
-- FDW 外部表扩展 — DWS 层新表 + 已有表新增字段
|
||||
-- 目标库:test_zqyy_app(测试)/ zqyy_app(生产)
|
||||
-- 前提:ETL 库 app schema 已创建/更新对应 RLS 视图
|
||||
-- =============================================================================
|
||||
|
||||
-- -----------------------------------------------------------------------------
|
||||
-- 1. 新建:助教订单流水统计 FDW 外部表
|
||||
-- -----------------------------------------------------------------------------
|
||||
CREATE FOREIGN TABLE IF NOT EXISTS fdw_etl.v_dws_assistant_order_contribution (
|
||||
contribution_id BIGINT,
|
||||
site_id INTEGER,
|
||||
tenant_id INTEGER,
|
||||
assistant_id BIGINT,
|
||||
assistant_nickname VARCHAR(100),
|
||||
stat_date DATE,
|
||||
order_gross_revenue NUMERIC(14,2),
|
||||
order_net_revenue NUMERIC(14,2),
|
||||
time_weighted_revenue NUMERIC(14,2),
|
||||
time_weighted_net_revenue NUMERIC(14,2),
|
||||
order_count INTEGER,
|
||||
total_service_seconds INTEGER,
|
||||
created_at TIMESTAMP WITH TIME ZONE,
|
||||
updated_at TIMESTAMP WITH TIME ZONE
|
||||
) SERVER etl_server
|
||||
OPTIONS (schema_name 'app', table_name 'v_dws_assistant_order_contribution');
|
||||
|
||||
-- -----------------------------------------------------------------------------
|
||||
-- 2. 重建:会员消费汇总 FDW 外部表(新增充值窗口 + 次均消费字段)
|
||||
-- -----------------------------------------------------------------------------
|
||||
DROP FOREIGN TABLE IF EXISTS fdw_etl.v_dws_member_consumption_summary;
|
||||
|
||||
CREATE FOREIGN TABLE fdw_etl.v_dws_member_consumption_summary (
|
||||
id BIGINT,
|
||||
site_id BIGINT,
|
||||
tenant_id BIGINT,
|
||||
member_id BIGINT,
|
||||
stat_date DATE,
|
||||
member_nickname VARCHAR(100),
|
||||
member_mobile VARCHAR(20),
|
||||
card_grade_name VARCHAR(50),
|
||||
register_date DATE,
|
||||
first_consume_date DATE,
|
||||
last_consume_date DATE,
|
||||
total_visit_count INTEGER,
|
||||
total_consume_amount NUMERIC(14,2),
|
||||
total_recharge_amount NUMERIC(14,2),
|
||||
total_table_fee NUMERIC(14,2),
|
||||
total_goods_amount NUMERIC(14,2),
|
||||
total_assistant_amount NUMERIC(14,2),
|
||||
visit_count_7d INTEGER,
|
||||
visit_count_10d INTEGER,
|
||||
visit_count_15d INTEGER,
|
||||
visit_count_30d INTEGER,
|
||||
visit_count_60d INTEGER,
|
||||
visit_count_90d INTEGER,
|
||||
consume_amount_7d NUMERIC(14,2),
|
||||
consume_amount_10d NUMERIC(14,2),
|
||||
consume_amount_15d NUMERIC(14,2),
|
||||
consume_amount_30d NUMERIC(14,2),
|
||||
consume_amount_60d NUMERIC(14,2),
|
||||
consume_amount_90d NUMERIC(14,2),
|
||||
cash_card_balance NUMERIC(14,2),
|
||||
gift_card_balance NUMERIC(14,2),
|
||||
total_card_balance NUMERIC(14,2),
|
||||
days_since_last INTEGER,
|
||||
is_active_7d BOOLEAN,
|
||||
is_active_30d BOOLEAN,
|
||||
is_active_90d BOOLEAN,
|
||||
customer_tier VARCHAR(20),
|
||||
created_at TIMESTAMP WITH TIME ZONE,
|
||||
updated_at TIMESTAMP WITH TIME ZONE,
|
||||
recharge_count_30d INTEGER,
|
||||
recharge_count_60d INTEGER,
|
||||
recharge_count_90d INTEGER,
|
||||
recharge_amount_30d NUMERIC(14,2),
|
||||
recharge_amount_60d NUMERIC(14,2),
|
||||
recharge_amount_90d NUMERIC(14,2),
|
||||
avg_ticket_amount NUMERIC(14,2)
|
||||
) SERVER etl_server
|
||||
OPTIONS (schema_name 'app', table_name 'v_dws_member_consumption_summary');
|
||||
|
||||
|
||||
-- -----------------------------------------------------------------------------
|
||||
-- 3. 重建:助教日度明细 FDW 外部表(新增惩罚字段)
|
||||
-- -----------------------------------------------------------------------------
|
||||
DROP FOREIGN TABLE IF EXISTS fdw_etl.v_dws_assistant_daily_detail;
|
||||
|
||||
CREATE FOREIGN TABLE fdw_etl.v_dws_assistant_daily_detail (
|
||||
id BIGINT,
|
||||
site_id BIGINT,
|
||||
tenant_id BIGINT,
|
||||
assistant_id BIGINT,
|
||||
assistant_nickname VARCHAR(50),
|
||||
stat_date DATE,
|
||||
assistant_level_code INTEGER,
|
||||
assistant_level_name VARCHAR(20),
|
||||
total_service_count INTEGER,
|
||||
base_service_count INTEGER,
|
||||
bonus_service_count INTEGER,
|
||||
room_service_count INTEGER,
|
||||
total_seconds INTEGER,
|
||||
base_seconds INTEGER,
|
||||
bonus_seconds INTEGER,
|
||||
room_seconds INTEGER,
|
||||
total_hours NUMERIC(10,2),
|
||||
base_hours NUMERIC(10,2),
|
||||
bonus_hours NUMERIC(10,2),
|
||||
room_hours NUMERIC(10,2),
|
||||
total_ledger_amount NUMERIC(12,2),
|
||||
base_ledger_amount NUMERIC(12,2),
|
||||
bonus_ledger_amount NUMERIC(12,2),
|
||||
room_ledger_amount NUMERIC(12,2),
|
||||
unique_customers INTEGER,
|
||||
unique_tables INTEGER,
|
||||
trashed_seconds INTEGER,
|
||||
trashed_count INTEGER,
|
||||
created_at TIMESTAMP WITH TIME ZONE,
|
||||
updated_at TIMESTAMP WITH TIME ZONE,
|
||||
penalty_minutes NUMERIC(10,2),
|
||||
penalty_reason TEXT,
|
||||
is_exempt BOOLEAN,
|
||||
per_hour_contribution NUMERIC(14,2)
|
||||
) SERVER etl_server
|
||||
OPTIONS (schema_name 'app', table_name 'v_dws_assistant_daily_detail');
|
||||
|
||||
-- -----------------------------------------------------------------------------
-- Grants
-- -----------------------------------------------------------------------------
-- NOTE: DROP FOREIGN TABLE also drops any privileges previously granted on the
-- table, so the two foreign tables recreated above (v_dws_member_consumption_summary,
-- v_dws_assistant_daily_detail) must be re-granted here as well; otherwise
-- app_user silently loses SELECT access to them after this migration runs.
GRANT SELECT ON fdw_etl.v_dws_assistant_order_contribution TO app_user;
GRANT SELECT ON fdw_etl.v_dws_member_consumption_summary   TO app_user;
GRANT SELECT ON fdw_etl.v_dws_assistant_daily_detail       TO app_user;
|
||||
|
||||
-- =============================================================================
|
||||
-- 回滚脚本(按逆序执行)
|
||||
-- =============================================================================
|
||||
-- REVOKE SELECT ON fdw_etl.v_dws_assistant_order_contribution FROM app_user;
|
||||
-- DROP FOREIGN TABLE IF EXISTS fdw_etl.v_dws_assistant_daily_detail;
|
||||
-- DROP FOREIGN TABLE IF EXISTS fdw_etl.v_dws_member_consumption_summary;
|
||||
-- DROP FOREIGN TABLE IF EXISTS fdw_etl.v_dws_assistant_order_contribution;
|
||||
-- 然后重建旧版本的 v_dws_assistant_daily_detail 和 v_dws_member_consumption_summary(不含新字段)
|
||||
@@ -0,0 +1,67 @@
|
||||
-- =============================================================================
|
||||
-- 迁移脚本:扩展助教日度业绩明细表 — 新增定档折算惩罚字段
|
||||
-- 日期:2026-02-24
|
||||
-- 说明:在 dws.dws_assistant_daily_detail 中新增 penalty_minutes、penalty_reason、
|
||||
-- is_exempt、per_hour_contribution 四个字段,支撑定档折算惩罚检测与计算。
|
||||
-- 需求:5.1, 5.2
|
||||
-- =============================================================================
|
||||
|
||||
BEGIN;
|
||||
|
||||
-- ---------------------------------------------------------------------------
|
||||
-- 1. ALTER TABLE:新增定档折算惩罚字段
|
||||
-- ---------------------------------------------------------------------------
|
||||
ALTER TABLE dws.dws_assistant_daily_detail
|
||||
ADD COLUMN IF NOT EXISTS penalty_minutes NUMERIC(10,2) DEFAULT 0,
|
||||
ADD COLUMN IF NOT EXISTS penalty_reason TEXT,
|
||||
ADD COLUMN IF NOT EXISTS is_exempt BOOLEAN DEFAULT FALSE,
|
||||
ADD COLUMN IF NOT EXISTS per_hour_contribution NUMERIC(14,2);
|
||||
|
||||
COMMENT ON COLUMN dws.dws_assistant_daily_detail.penalty_minutes
|
||||
IS '定档折算惩罚分钟数,无惩罚时为 0';
|
||||
COMMENT ON COLUMN dws.dws_assistant_daily_detail.penalty_reason
|
||||
IS '惩罚原因描述,无惩罚时为 NULL';
|
||||
COMMENT ON COLUMN dws.dws_assistant_daily_detail.is_exempt
|
||||
IS '是否豁免惩罚,TRUE 时跳过惩罚计算';
|
||||
COMMENT ON COLUMN dws.dws_assistant_daily_detail.per_hour_contribution
|
||||
IS '单人每小时贡献流水 = 台费每小时实收单价 / 本次基础课助教人数';
|
||||
|
||||
COMMIT;
|
||||
|
||||
-- =============================================================================
|
||||
-- 回滚脚本(如需撤销)
|
||||
-- =============================================================================
|
||||
-- ALTER TABLE dws.dws_assistant_daily_detail
|
||||
-- DROP COLUMN IF EXISTS penalty_minutes,
|
||||
-- DROP COLUMN IF EXISTS penalty_reason,
|
||||
-- DROP COLUMN IF EXISTS is_exempt,
|
||||
-- DROP COLUMN IF EXISTS per_hour_contribution;
|
||||
|
||||
-- =============================================================================
|
||||
-- 验证 SQL
|
||||
-- =============================================================================
|
||||
-- 1. 确认新增字段存在
|
||||
-- SELECT column_name, data_type, column_default
|
||||
-- FROM information_schema.columns
|
||||
-- WHERE table_schema = 'dws'
|
||||
-- AND table_name = 'dws_assistant_daily_detail'
|
||||
-- AND column_name IN ('penalty_minutes', 'penalty_reason', 'is_exempt', 'per_hour_contribution')
|
||||
-- ORDER BY column_name;
|
||||
-- 预期:4 行
|
||||
|
||||
-- 2. 确认字段类型正确
|
||||
-- SELECT column_name, data_type, numeric_precision, numeric_scale
|
||||
-- FROM information_schema.columns
|
||||
-- WHERE table_schema = 'dws'
|
||||
-- AND table_name = 'dws_assistant_daily_detail'
|
||||
-- AND column_name IN ('penalty_minutes', 'per_hour_contribution')
|
||||
-- ORDER BY column_name;
|
||||
-- 预期:penalty_minutes → numeric(10,2),per_hour_contribution → numeric(14,2)
|
||||
|
||||
-- 3. 确认 is_exempt 默认值
|
||||
-- SELECT column_name, column_default
|
||||
-- FROM information_schema.columns
|
||||
-- WHERE table_schema = 'dws'
|
||||
-- AND table_name = 'dws_assistant_daily_detail'
|
||||
-- AND column_name = 'is_exempt';
|
||||
-- 预期:column_default = 'false'
|
||||
@@ -0,0 +1,89 @@
|
||||
-- =============================================================================
|
||||
-- 迁移脚本:扩展会员消费汇总表 — 新增充值窗口和次均消费字段
|
||||
-- 日期:2026-02-24
|
||||
-- 说明:在 dws.dws_member_consumption_summary 中新增 30/60/90 天充值次数、
|
||||
-- 充值金额以及次均消费额度字段,支撑小程序客户看板展示。
|
||||
-- 需求:3.1, 3.2
|
||||
-- =============================================================================
|
||||
|
||||
BEGIN;
|
||||
|
||||
-- ---------------------------------------------------------------------------
|
||||
-- 1. ALTER TABLE:新增充值窗口 + 次均消费字段
|
||||
-- ---------------------------------------------------------------------------
|
||||
ALTER TABLE dws.dws_member_consumption_summary
|
||||
ADD COLUMN IF NOT EXISTS recharge_count_30d INTEGER DEFAULT 0,
|
||||
ADD COLUMN IF NOT EXISTS recharge_count_60d INTEGER DEFAULT 0,
|
||||
ADD COLUMN IF NOT EXISTS recharge_count_90d INTEGER DEFAULT 0,
|
||||
ADD COLUMN IF NOT EXISTS recharge_amount_30d NUMERIC(14,2) DEFAULT 0,
|
||||
ADD COLUMN IF NOT EXISTS recharge_amount_60d NUMERIC(14,2) DEFAULT 0,
|
||||
ADD COLUMN IF NOT EXISTS recharge_amount_90d NUMERIC(14,2) DEFAULT 0,
|
||||
ADD COLUMN IF NOT EXISTS avg_ticket_amount NUMERIC(14,2) DEFAULT 0;
|
||||
|
||||
COMMENT ON COLUMN dws.dws_member_consumption_summary.recharge_count_30d
|
||||
IS '近 30 天充值次数';
|
||||
COMMENT ON COLUMN dws.dws_member_consumption_summary.recharge_count_60d
|
||||
IS '近 60 天充值次数';
|
||||
COMMENT ON COLUMN dws.dws_member_consumption_summary.recharge_count_90d
|
||||
IS '近 90 天充值次数';
|
||||
COMMENT ON COLUMN dws.dws_member_consumption_summary.recharge_amount_30d
|
||||
IS '近 30 天充值金额';
|
||||
COMMENT ON COLUMN dws.dws_member_consumption_summary.recharge_amount_60d
|
||||
IS '近 60 天充值金额';
|
||||
COMMENT ON COLUMN dws.dws_member_consumption_summary.recharge_amount_90d
|
||||
IS '近 90 天充值金额';
|
||||
COMMENT ON COLUMN dws.dws_member_consumption_summary.avg_ticket_amount
|
||||
IS '次均消费额度 = total_consume_amount / MAX(total_visit_count, 1)';
|
||||
|
||||
COMMIT;
|
||||
|
||||
-- =============================================================================
|
||||
-- 回滚脚本(如需撤销)
|
||||
-- =============================================================================
|
||||
-- ALTER TABLE dws.dws_member_consumption_summary
|
||||
-- DROP COLUMN IF EXISTS recharge_count_30d,
|
||||
-- DROP COLUMN IF EXISTS recharge_count_60d,
|
||||
-- DROP COLUMN IF EXISTS recharge_count_90d,
|
||||
-- DROP COLUMN IF EXISTS recharge_amount_30d,
|
||||
-- DROP COLUMN IF EXISTS recharge_amount_60d,
|
||||
-- DROP COLUMN IF EXISTS recharge_amount_90d,
|
||||
-- DROP COLUMN IF EXISTS avg_ticket_amount;
|
||||
|
||||
-- =============================================================================
|
||||
-- 验证 SQL
|
||||
-- =============================================================================
|
||||
-- 1. 确认新增字段存在
|
||||
-- SELECT column_name, data_type, column_default
|
||||
-- FROM information_schema.columns
|
||||
-- WHERE table_schema = 'dws'
|
||||
-- AND table_name = 'dws_member_consumption_summary'
|
||||
-- AND column_name IN (
|
||||
-- 'recharge_count_30d', 'recharge_count_60d', 'recharge_count_90d',
|
||||
-- 'recharge_amount_30d', 'recharge_amount_60d', 'recharge_amount_90d',
|
||||
-- 'avg_ticket_amount'
|
||||
-- )
|
||||
-- ORDER BY column_name;
|
||||
-- 预期:7 行
|
||||
|
||||
-- 2. 确认字段类型正确
|
||||
-- SELECT column_name, data_type, numeric_precision, numeric_scale
|
||||
-- FROM information_schema.columns
|
||||
-- WHERE table_schema = 'dws'
|
||||
-- AND table_name = 'dws_member_consumption_summary'
|
||||
-- AND column_name LIKE 'recharge_amount%'
|
||||
-- ORDER BY column_name;
|
||||
-- 预期:3 行,data_type = numeric, precision = 14, scale = 2
|
||||
|
||||
-- 3. 确认注释已设置
|
||||
-- SELECT c.column_name,
|
||||
-- pgd.description
|
||||
-- FROM information_schema.columns c
|
||||
-- JOIN pg_catalog.pg_statio_all_tables st
|
||||
-- ON c.table_schema = st.schemaname AND c.table_name = st.relname
|
||||
-- JOIN pg_catalog.pg_description pgd
|
||||
-- ON pgd.objoid = st.relid AND pgd.objsubid = c.ordinal_position
|
||||
-- WHERE c.table_schema = 'dws'
|
||||
-- AND c.table_name = 'dws_member_consumption_summary'
|
||||
-- AND c.column_name LIKE 'recharge%'
|
||||
-- ORDER BY c.column_name;
|
||||
-- 预期:6 行,description 非空
|
||||
@@ -0,0 +1,89 @@
|
||||
-- =============================================================================
|
||||
-- 迁移脚本:创建助教订单流水四项统计表
|
||||
-- 日期:2026-02-24
|
||||
-- 说明:新建 dws.dws_assistant_order_contribution 表,存储每名助教每日的
|
||||
-- 订单总流水、订单净流水、时效贡献流水、时效净贡献四项统计数据。
|
||||
-- 需求:1.1, 1.2, 1.3, 1.4, 1.5
|
||||
-- =============================================================================
|
||||
|
||||
BEGIN;
|
||||
|
||||
-- ---------------------------------------------------------------------------
|
||||
-- 1. 建表:dws.dws_assistant_order_contribution
|
||||
-- ---------------------------------------------------------------------------
|
||||
CREATE TABLE IF NOT EXISTS dws.dws_assistant_order_contribution (
|
||||
contribution_id BIGSERIAL PRIMARY KEY,
|
||||
site_id INTEGER NOT NULL,
|
||||
tenant_id INTEGER NOT NULL,
|
||||
assistant_id BIGINT NOT NULL,
|
||||
assistant_nickname VARCHAR(100),
|
||||
stat_date DATE NOT NULL,
|
||||
|
||||
-- 四项统计
|
||||
order_gross_revenue NUMERIC(14,2) DEFAULT 0, -- 订单总流水
|
||||
order_net_revenue NUMERIC(14,2) DEFAULT 0, -- 订单净流水
|
||||
time_weighted_revenue NUMERIC(14,2) DEFAULT 0, -- 时效贡献流水
|
||||
time_weighted_net_revenue NUMERIC(14,2) DEFAULT 0, -- 时效净贡献
|
||||
|
||||
-- 辅助字段
|
||||
order_count INTEGER DEFAULT 0, -- 参与订单数
|
||||
total_service_seconds INTEGER DEFAULT 0, -- 总服务时长(秒)
|
||||
|
||||
-- 元数据
|
||||
created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW(),
|
||||
updated_at TIMESTAMP WITH TIME ZONE DEFAULT NOW()
|
||||
);
|
||||
|
||||
COMMENT ON TABLE dws.dws_assistant_order_contribution
|
||||
IS '助教订单流水四项统计表,粒度 (site_id, assistant_id, stat_date)';
|
||||
|
||||
COMMENT ON COLUMN dws.dws_assistant_order_contribution.order_gross_revenue
|
||||
IS '订单总流水 = 台费 + 酒水食品 + 所有助教服务费';
|
||||
COMMENT ON COLUMN dws.dws_assistant_order_contribution.order_net_revenue
|
||||
IS '订单净流水 = 订单总流水 - 所有助教服务分成';
|
||||
COMMENT ON COLUMN dws.dws_assistant_order_contribution.time_weighted_revenue
|
||||
IS '时效贡献流水 = 台费按时长分摊 + 个人服务费 + 酒水食品按时长比例';
|
||||
COMMENT ON COLUMN dws.dws_assistant_order_contribution.time_weighted_net_revenue
|
||||
IS '时效净贡献 = 时效贡献流水 - 个人服务分成';
|
||||
|
||||
-- ---------------------------------------------------------------------------
|
||||
-- 2. 唯一索引:确保 (site_id, assistant_id, stat_date) 唯一
|
||||
-- ---------------------------------------------------------------------------
|
||||
CREATE UNIQUE INDEX IF NOT EXISTS idx_aoc_site_assistant_date
|
||||
ON dws.dws_assistant_order_contribution (site_id, assistant_id, stat_date);
|
||||
|
||||
-- ---------------------------------------------------------------------------
|
||||
-- 3. 查询索引:按门店+日期查询
|
||||
-- ---------------------------------------------------------------------------
|
||||
CREATE INDEX IF NOT EXISTS idx_aoc_stat_date
|
||||
ON dws.dws_assistant_order_contribution (site_id, stat_date);
|
||||
|
||||
COMMIT;
|
||||
|
||||
-- =============================================================================
|
||||
-- 回滚脚本(如需撤销)
|
||||
-- =============================================================================
|
||||
-- DROP INDEX IF EXISTS dws.idx_aoc_stat_date;
|
||||
-- DROP INDEX IF EXISTS dws.idx_aoc_site_assistant_date;
|
||||
-- DROP TABLE IF EXISTS dws.dws_assistant_order_contribution;
|
||||
|
||||
-- =============================================================================
|
||||
-- 验证 SQL
|
||||
-- =============================================================================
|
||||
-- 1. 确认表存在
|
||||
-- SELECT table_schema, table_name FROM information_schema.tables
|
||||
-- WHERE table_schema = 'dws' AND table_name = 'dws_assistant_order_contribution';
|
||||
-- 预期:1 行
|
||||
|
||||
-- 2. 确认字段完整
|
||||
-- SELECT column_name, data_type, column_default
|
||||
-- FROM information_schema.columns
|
||||
-- WHERE table_schema = 'dws' AND table_name = 'dws_assistant_order_contribution'
|
||||
-- ORDER BY ordinal_position;
|
||||
-- 预期:14 行
|
||||
|
||||
-- 3. 确认索引存在
|
||||
-- SELECT indexname FROM pg_indexes
|
||||
-- WHERE schemaname = 'dws' AND tablename = 'dws_assistant_order_contribution'
|
||||
-- ORDER BY indexname;
|
||||
-- 预期:3 行(PK + idx_aoc_site_assistant_date + idx_aoc_stat_date)
|
||||
@@ -0,0 +1,34 @@
|
||||
-- =============================================================================
|
||||
-- RLS 视图:助教订单流水统计 + 已有视图重建(包含新增字段)
|
||||
-- 目标库:test_etl_feiqiu(测试)/ etl_feiqiu(生产)
|
||||
-- 前提:dws.dws_assistant_order_contribution 表已创建;
|
||||
-- dws.dws_member_consumption_summary 已新增充值窗口字段;
|
||||
-- dws.dws_assistant_daily_detail 已新增惩罚字段
|
||||
-- =============================================================================
|
||||
|
||||
-- -----------------------------------------------------------------------------
|
||||
-- 1. 新建:助教订单流水统计 RLS 视图
|
||||
-- -----------------------------------------------------------------------------
|
||||
CREATE OR REPLACE VIEW app.v_dws_assistant_order_contribution AS
|
||||
SELECT * FROM dws.dws_assistant_order_contribution
|
||||
WHERE site_id = current_setting('app.current_site_id')::bigint;
|
||||
|
||||
GRANT SELECT ON app.v_dws_assistant_order_contribution TO app_reader;
|
||||
|
||||
-- -----------------------------------------------------------------------------
|
||||
-- 2. 重建已有视图(使用 SELECT * 以包含新增字段)
|
||||
-- 原视图使用显式字段列表,新增字段不会自动暴露
|
||||
-- -----------------------------------------------------------------------------
|
||||
CREATE OR REPLACE VIEW app.v_dws_member_consumption_summary AS
|
||||
SELECT * FROM dws.dws_member_consumption_summary
|
||||
WHERE site_id = current_setting('app.current_site_id')::bigint;
|
||||
|
||||
CREATE OR REPLACE VIEW app.v_dws_assistant_daily_detail AS
|
||||
SELECT * FROM dws.dws_assistant_daily_detail
|
||||
WHERE site_id = current_setting('app.current_site_id')::bigint;
|
||||
|
||||
-- =============================================================================
|
||||
-- 回滚脚本
|
||||
-- =============================================================================
|
||||
-- DROP VIEW IF EXISTS app.v_dws_assistant_order_contribution;
|
||||
-- 对于已有视图的回滚,需要用旧的显式字段列表重建(参见 2026-02-24__p1_create_app_schema_rls_views.sql)
|
||||
@@ -0,0 +1,78 @@
|
||||
-- 迁移:新增 goodsStockWarningInfo 嵌套字段到 ODS 和 DWD
|
||||
-- 日期:2026-02-24
|
||||
-- 关联:一致性检查报告发现 API 独有字段 goodsStockWarningInfo 未映射
|
||||
-- 字段来源:API store_goods_master -> goodsStockWarningInfo 嵌套对象
|
||||
-- - sales_day: 销售天数(用于库存预警计算)
|
||||
-- - warning_day_max: 预警天数上限
|
||||
-- - warning_day_min: 预警天数下限
|
||||
-- - site_goods_id / tenant_goods_id: 已有冗余字段,不重复收录
|
||||
|
||||
BEGIN;
|
||||
|
||||
-- =============================================================================
|
||||
-- 1. ODS 层:ods.store_goods_master 新增 3 列
|
||||
-- =============================================================================
|
||||
ALTER TABLE ods.store_goods_master
|
||||
ADD COLUMN IF NOT EXISTS warning_sales_day NUMERIC(18,2),
|
||||
ADD COLUMN IF NOT EXISTS warning_day_max INTEGER,
|
||||
ADD COLUMN IF NOT EXISTS warning_day_min INTEGER;
|
||||
|
||||
COMMENT ON COLUMN ods.store_goods_master.warning_sales_day IS
|
||||
'【说明】库存预警参考的日均销量。 【ODS来源】store_goods_master - goodsStockWarningInfo.sales_day。 【JSON字段】store_goods_master.json - data.orderGoodsList - goodsStockWarningInfo.sales_day。';
|
||||
COMMENT ON COLUMN ods.store_goods_master.warning_day_max IS
|
||||
'【说明】库存预警天数上限。 【ODS来源】store_goods_master - goodsStockWarningInfo.warning_day_max。 【JSON字段】store_goods_master.json - data.orderGoodsList - goodsStockWarningInfo.warning_day_max。';
|
||||
COMMENT ON COLUMN ods.store_goods_master.warning_day_min IS
|
||||
'【说明】库存预警天数下限。 【ODS来源】store_goods_master - goodsStockWarningInfo.warning_day_min。 【JSON字段】store_goods_master.json - data.orderGoodsList - goodsStockWarningInfo.warning_day_min。';
|
||||
|
||||
-- =============================================================================
|
||||
-- 2. DWD 层:dwd.dim_store_goods_ex 新增 3 列
|
||||
-- =============================================================================
|
||||
ALTER TABLE dwd.dim_store_goods_ex
|
||||
ADD COLUMN IF NOT EXISTS warning_sales_day NUMERIC(18,2),
|
||||
ADD COLUMN IF NOT EXISTS warning_day_max INTEGER,
|
||||
ADD COLUMN IF NOT EXISTS warning_day_min INTEGER;
|
||||
|
||||
COMMENT ON COLUMN dwd.dim_store_goods_ex.warning_sales_day IS
|
||||
'【说明】库存预警参考的日均销量。 【ODS来源】store_goods_master - goodsStockWarningInfo.sales_day。 【JSON字段】store_goods_master.json - data.orderGoodsList - goodsStockWarningInfo.sales_day。';
|
||||
COMMENT ON COLUMN dwd.dim_store_goods_ex.warning_day_max IS
|
||||
'【说明】库存预警天数上限。 【ODS来源】store_goods_master - goodsStockWarningInfo.warning_day_max。 【JSON字段】store_goods_master.json - data.orderGoodsList - goodsStockWarningInfo.warning_day_max。';
|
||||
COMMENT ON COLUMN dwd.dim_store_goods_ex.warning_day_min IS
|
||||
'【说明】库存预警天数下限。 【ODS来源】store_goods_master - goodsStockWarningInfo.warning_day_min。 【JSON字段】store_goods_master.json - data.orderGoodsList - goodsStockWarningInfo.warning_day_min。';
|
||||
|
||||
COMMIT;
|
||||
|
||||
-- =============================================================================
|
||||
-- 回滚策略
|
||||
-- =============================================================================
|
||||
-- ALTER TABLE ods.store_goods_master
|
||||
-- DROP COLUMN IF EXISTS warning_sales_day,
|
||||
-- DROP COLUMN IF EXISTS warning_day_max,
|
||||
-- DROP COLUMN IF EXISTS warning_day_min;
|
||||
-- ALTER TABLE dwd.dim_store_goods_ex
|
||||
-- DROP COLUMN IF EXISTS warning_sales_day,
|
||||
-- DROP COLUMN IF EXISTS warning_day_max,
|
||||
-- DROP COLUMN IF EXISTS warning_day_min;
|
||||
|
||||
-- =============================================================================
|
||||
-- 验证 SQL
|
||||
-- =============================================================================
|
||||
-- 1. 确认 ODS 新列存在
|
||||
-- SELECT column_name, data_type FROM information_schema.columns
|
||||
-- WHERE table_schema = 'ods' AND table_name = 'store_goods_master'
|
||||
-- AND column_name IN ('warning_sales_day', 'warning_day_max', 'warning_day_min')
|
||||
-- ORDER BY column_name;
|
||||
-- 预期:3 行
|
||||
|
||||
-- 2. 确认 DWD 新列存在
|
||||
-- SELECT column_name, data_type FROM information_schema.columns
|
||||
-- WHERE table_schema = 'dwd' AND table_name = 'dim_store_goods_ex'
|
||||
-- AND column_name IN ('warning_sales_day', 'warning_day_max', 'warning_day_min')
|
||||
-- ORDER BY column_name;
|
||||
-- 预期:3 行
|
||||
|
||||
-- 3. 确认注释已设置
|
||||
-- SELECT col_description(
|
||||
-- (SELECT oid FROM pg_class WHERE relname = 'store_goods_master' AND relnamespace = (SELECT oid FROM pg_namespace WHERE nspname = 'ods')),
|
||||
-- (SELECT ordinal_position FROM information_schema.columns WHERE table_schema = 'ods' AND table_name = 'store_goods_master' AND column_name = 'warning_sales_day')
|
||||
-- );
|
||||
-- 预期:非空
|
||||
@@ -0,0 +1,21 @@
|
||||
-- 迁移:清理 ODS_ASSISTANT_ABOLISH 残留元数据
|
||||
-- 日期:2026-02-24
|
||||
-- 关联:联调报告发现 meta.etl_task 中仍有 ODS_ASSISTANT_ABOLISH 注册,
|
||||
-- 导致调度器尝试执行已删除的任务并报 ValueError
|
||||
-- 幂等:DELETE WHERE 不存在时无影响
|
||||
|
||||
BEGIN;
|
||||
|
||||
-- 1. 清理 meta.etl_task 中的 ODS_ASSISTANT_ABOLISH 注册
|
||||
DELETE FROM meta.etl_task WHERE task_code = 'ODS_ASSISTANT_ABOLISH';
|
||||
|
||||
-- 2. 清理 meta.etl_task 中的旧式 ASSISTANT_ABOLISH 注册(seed_scheduler_tasks.sql 残留)
|
||||
DELETE FROM meta.etl_task WHERE task_code = 'ASSISTANT_ABOLISH';
|
||||
|
||||
COMMIT;
|
||||
|
||||
-- =============================================================================
|
||||
-- 验证 SQL
|
||||
-- =============================================================================
|
||||
-- SELECT * FROM meta.etl_task WHERE task_code IN ('ODS_ASSISTANT_ABOLISH', 'ASSISTANT_ABOLISH');
|
||||
-- 预期:0 行
|
||||
@@ -0,0 +1,196 @@
|
||||
-- =============================================================================
|
||||
-- 迁移脚本:创建 app Schema、RLS 视图层与 app_reader 角色
|
||||
-- 日期:2026-02-24
|
||||
-- 目标库:test_etl_feiqiu(通过 PG_DSN 连接)
|
||||
-- 说明:为 DWD/DWS 层共 35 张表创建带 site_id 行级过滤的 RLS 视图,
|
||||
-- 供业务库通过 postgres_fdw 只读访问。
|
||||
-- cfg_* 配置表无 site_id 列,视图直接 SELECT * 不加过滤。
|
||||
-- dim_member / dim_member_card_account 使用 register_site_id 列过滤。
|
||||
-- dim_staff_ex 无 site_id 列,视图直接 SELECT * 不加过滤。
|
||||
-- 需求:2.1, 2.2, 2.3, 2.4, 2.7, 2.8, 4.1, 4.3, 4.4, 4.5, 4.6
|
||||
-- =============================================================================
|
||||
|
||||
-- ---------------------------------------------------------------------------
|
||||
-- 1. 创建 app Schema
|
||||
-- ---------------------------------------------------------------------------
|
||||
CREATE SCHEMA IF NOT EXISTS app;
|
||||
|
||||
-- ---------------------------------------------------------------------------
|
||||
-- 2. 创建 app_reader 只读角色(条件创建)
|
||||
-- ---------------------------------------------------------------------------
|
||||
DO $$ BEGIN
|
||||
IF NOT EXISTS (SELECT FROM pg_roles WHERE rolname = 'app_reader') THEN
|
||||
CREATE ROLE app_reader LOGIN;
|
||||
END IF;
|
||||
END $$;
|
||||
|
||||
-- ---------------------------------------------------------------------------
|
||||
-- 3. DWD 层 RLS 视图(11 张,全部含 site_id 过滤)
|
||||
-- ---------------------------------------------------------------------------
|
||||
-- dim_member 使用 register_site_id 而非 site_id
|
||||
CREATE OR REPLACE VIEW app.v_dim_member AS
|
||||
SELECT * FROM dwd.dim_member
|
||||
WHERE register_site_id = current_setting('app.current_site_id')::bigint;
|
||||
|
||||
CREATE OR REPLACE VIEW app.v_dim_assistant AS
|
||||
SELECT * FROM dwd.dim_assistant
|
||||
WHERE site_id = current_setting('app.current_site_id')::bigint;
|
||||
|
||||
-- dim_member_card_account 使用 register_site_id 而非 site_id
|
||||
CREATE OR REPLACE VIEW app.v_dim_member_card_account AS
|
||||
SELECT * FROM dwd.dim_member_card_account
|
||||
WHERE register_site_id = current_setting('app.current_site_id')::bigint;
|
||||
|
||||
CREATE OR REPLACE VIEW app.v_dim_table AS
|
||||
SELECT * FROM dwd.dim_table
|
||||
WHERE site_id = current_setting('app.current_site_id')::bigint;
|
||||
|
||||
CREATE OR REPLACE VIEW app.v_dwd_settlement_head AS
|
||||
SELECT * FROM dwd.dwd_settlement_head
|
||||
WHERE site_id = current_setting('app.current_site_id')::bigint;
|
||||
|
||||
CREATE OR REPLACE VIEW app.v_dwd_table_fee_log AS
|
||||
SELECT * FROM dwd.dwd_table_fee_log
|
||||
WHERE site_id = current_setting('app.current_site_id')::bigint;
|
||||
|
||||
CREATE OR REPLACE VIEW app.v_dwd_assistant_service_log AS
|
||||
SELECT * FROM dwd.dwd_assistant_service_log
|
||||
WHERE site_id = current_setting('app.current_site_id')::bigint;
|
||||
|
||||
CREATE OR REPLACE VIEW app.v_dwd_recharge_order AS
|
||||
SELECT * FROM dwd.dwd_recharge_order
|
||||
WHERE site_id = current_setting('app.current_site_id')::bigint;
|
||||
|
||||
CREATE OR REPLACE VIEW app.v_dwd_store_goods_sale AS
|
||||
SELECT * FROM dwd.dwd_store_goods_sale
|
||||
WHERE site_id = current_setting('app.current_site_id')::bigint;
|
||||
|
||||
CREATE OR REPLACE VIEW app.v_dim_staff AS
|
||||
SELECT * FROM dwd.dim_staff
|
||||
WHERE site_id = current_setting('app.current_site_id')::bigint;
|
||||
|
||||
-- dim_staff_ex 无 site_id 列,直接 SELECT * 不加过滤
|
||||
CREATE OR REPLACE VIEW app.v_dim_staff_ex AS
|
||||
SELECT * FROM dwd.dim_staff_ex;
|
||||
|
||||
-- ---------------------------------------------------------------------------
-- 4. DWS-layer RLS views — filtered by site_id (20 views)
--    Each view exposes one dws table to app-side readers, restricted to the
--    site selected via the session setting app.current_site_id.
--    NOTE(review): current_setting('app.current_site_id') raises an error when
--    the setting is unset — callers must SET it before querying. Confirm this
--    fail-closed behavior is intended.
-- ---------------------------------------------------------------------------
CREATE OR REPLACE VIEW app.v_dws_member_consumption_summary AS
SELECT * FROM dws.dws_member_consumption_summary
WHERE site_id = current_setting('app.current_site_id')::bigint;

CREATE OR REPLACE VIEW app.v_dws_member_visit_detail AS
SELECT * FROM dws.dws_member_visit_detail
WHERE site_id = current_setting('app.current_site_id')::bigint;

CREATE OR REPLACE VIEW app.v_dws_member_winback_index AS
SELECT * FROM dws.dws_member_winback_index
WHERE site_id = current_setting('app.current_site_id')::bigint;

CREATE OR REPLACE VIEW app.v_dws_member_newconv_index AS
SELECT * FROM dws.dws_member_newconv_index
WHERE site_id = current_setting('app.current_site_id')::bigint;

CREATE OR REPLACE VIEW app.v_dws_member_recall_index AS
SELECT * FROM dws.dws_member_recall_index
WHERE site_id = current_setting('app.current_site_id')::bigint;

CREATE OR REPLACE VIEW app.v_dws_member_assistant_relation_index AS
SELECT * FROM dws.dws_member_assistant_relation_index
WHERE site_id = current_setting('app.current_site_id')::bigint;

CREATE OR REPLACE VIEW app.v_dws_member_assistant_intimacy AS
SELECT * FROM dws.dws_member_assistant_intimacy
WHERE site_id = current_setting('app.current_site_id')::bigint;

CREATE OR REPLACE VIEW app.v_dws_assistant_daily_detail AS
SELECT * FROM dws.dws_assistant_daily_detail
WHERE site_id = current_setting('app.current_site_id')::bigint;

CREATE OR REPLACE VIEW app.v_dws_assistant_monthly_summary AS
SELECT * FROM dws.dws_assistant_monthly_summary
WHERE site_id = current_setting('app.current_site_id')::bigint;

CREATE OR REPLACE VIEW app.v_dws_assistant_salary_calc AS
SELECT * FROM dws.dws_assistant_salary_calc
WHERE site_id = current_setting('app.current_site_id')::bigint;

CREATE OR REPLACE VIEW app.v_dws_assistant_customer_stats AS
SELECT * FROM dws.dws_assistant_customer_stats
WHERE site_id = current_setting('app.current_site_id')::bigint;

CREATE OR REPLACE VIEW app.v_dws_assistant_finance_analysis AS
SELECT * FROM dws.dws_assistant_finance_analysis
WHERE site_id = current_setting('app.current_site_id')::bigint;

CREATE OR REPLACE VIEW app.v_dws_finance_daily_summary AS
SELECT * FROM dws.dws_finance_daily_summary
WHERE site_id = current_setting('app.current_site_id')::bigint;

CREATE OR REPLACE VIEW app.v_dws_finance_income_structure AS
SELECT * FROM dws.dws_finance_income_structure
WHERE site_id = current_setting('app.current_site_id')::bigint;

CREATE OR REPLACE VIEW app.v_dws_finance_recharge_summary AS
SELECT * FROM dws.dws_finance_recharge_summary
WHERE site_id = current_setting('app.current_site_id')::bigint;

CREATE OR REPLACE VIEW app.v_dws_finance_discount_detail AS
SELECT * FROM dws.dws_finance_discount_detail
WHERE site_id = current_setting('app.current_site_id')::bigint;

CREATE OR REPLACE VIEW app.v_dws_finance_expense_summary AS
SELECT * FROM dws.dws_finance_expense_summary
WHERE site_id = current_setting('app.current_site_id')::bigint;

CREATE OR REPLACE VIEW app.v_dws_platform_settlement AS
SELECT * FROM dws.dws_platform_settlement
WHERE site_id = current_setting('app.current_site_id')::bigint;

CREATE OR REPLACE VIEW app.v_dws_assistant_recharge_commission AS
SELECT * FROM dws.dws_assistant_recharge_commission
WHERE site_id = current_setting('app.current_site_id')::bigint;

CREATE OR REPLACE VIEW app.v_dws_order_summary AS
SELECT * FROM dws.dws_order_summary
WHERE site_id = current_setting('app.current_site_id')::bigint;
-- ---------------------------------------------------------------------------
-- 5. DWS-layer cfg_* configuration views (4 views; tables have no site_id,
--    so they are exposed unfiltered)
-- ---------------------------------------------------------------------------
CREATE OR REPLACE VIEW app.v_cfg_performance_tier AS
SELECT * FROM dws.cfg_performance_tier;

CREATE OR REPLACE VIEW app.v_cfg_assistant_level_price AS
SELECT * FROM dws.cfg_assistant_level_price;

CREATE OR REPLACE VIEW app.v_cfg_bonus_rules AS
SELECT * FROM dws.cfg_bonus_rules;

CREATE OR REPLACE VIEW app.v_cfg_index_parameters AS
SELECT * FROM dws.cfg_index_parameters;
-- ---------------------------------------------------------------------------
-- 6. Reserved for P2 (uncomment and create these views once P2 lands)
-- ---------------------------------------------------------------------------
-- TODO [P2] dws.dws_member_spending_power_index → app.v_dws_member_spending_power_index
-- TODO [P2] dws.dws_assistant_order_contribution → app.v_dws_assistant_order_contribution

-- ---------------------------------------------------------------------------
-- 7. Grants: read-only access to the app schema for app_reader
-- ---------------------------------------------------------------------------
GRANT USAGE ON SCHEMA app TO app_reader;
-- Views count as "tables" for GRANT purposes, so this covers every view above.
GRANT SELECT ON ALL TABLES IN SCHEMA app TO app_reader;

-- Auto-grant SELECT on views created in this schema in the future.
ALTER DEFAULT PRIVILEGES IN SCHEMA app GRANT SELECT ON TABLES TO app_reader;

-- =============================================================================
-- Rollback (execute in reverse order)
-- =============================================================================
-- ALTER DEFAULT PRIVILEGES IN SCHEMA app REVOKE SELECT ON TABLES FROM app_reader;
-- REVOKE SELECT ON ALL TABLES IN SCHEMA app FROM app_reader;
-- REVOKE USAGE ON SCHEMA app FROM app_reader;
-- DROP SCHEMA IF EXISTS app CASCADE;
-- DROP ROLE IF EXISTS app_reader;
@@ -9,7 +9,7 @@ task_codes AS (
|
||||
-- Must match tasks/ods_tasks.py (ENABLED_ODS_CODES)
|
||||
'ODS_ASSISTANT_ACCOUNT',
|
||||
'ODS_ASSISTANT_LEDGER',
|
||||
'ODS_ASSISTANT_ABOLISH',
|
||||
-- CHANGE [2026-02-24] intent: 移除 ODS_ASSISTANT_ABOLISH(全链路已清理,表已删除)
|
||||
'ODS_SETTLEMENT_RECORDS',
|
||||
'ODS_TABLE_USE',
|
||||
'ODS_PAYMENT',
|
||||
|
||||
@@ -14,7 +14,7 @@ WITH target_store AS (
|
||||
),
|
||||
task_codes AS (
|
||||
SELECT unnest(ARRAY[
|
||||
'ASSISTANT_ABOLISH',
|
||||
-- CHANGE [2026-02-24] intent: 移除 ASSISTANT_ABOLISH(全链路已清理)
|
||||
'ASSISTANTS',
|
||||
'COUPON_USAGE',
|
||||
'CHECK_CUTOFF',
|
||||
|
||||
164
db/zqyy_app/README.md
Normal file
164
db/zqyy_app/README.md
Normal file
@@ -0,0 +1,164 @@
|
||||
# db/zqyy_app — 业务数据库

`zqyy_app` 是 NeoZQYY 的业务数据库,存储用户认证、任务队列、调度配置、执行日志等数据。
测试库:`test_zqyy_app`(开发和测试环境默认连接)。

## Schema 架构

| Schema | 用途 | 状态 |
|--------|------|------|
| `auth` | 用户认证与权限(微信用户、角色、权限、申请、绑定) | 已建表 |
| `biz` | 业务数据(预留,未来存储门店业务数据) | 已创建,待建表 |
| `public` | 管理后台(admin_users、task_queue、scheduled_tasks、task_execution_log) | 已建表 |

## auth Schema — 认证系统(8 张表)

### auth.users — 微信用户主表

| 字段 | 类型 | 说明 |
|------|------|------|
| id | SERIAL PK | 自增主键 |
| wx_openid | VARCHAR(128) UNIQUE | 微信 OpenID |
| wx_union_id | VARCHAR(128) | 微信 UnionID |
| wx_avatar_url | VARCHAR(512) | 头像 URL |
| nickname | VARCHAR(50) | 昵称 |
| phone | VARCHAR(20) | 手机号 |
| status | VARCHAR(20) | 状态:pending / approved / rejected / disabled |
| created_at | TIMESTAMPTZ | 创建时间 |
| updated_at | TIMESTAMPTZ | 更新时间 |

### auth.site_code_mapping — 球房ID与门店映射

| 字段 | 类型 | 说明 |
|------|------|------|
| id | SERIAL PK | 自增主键 |
| site_code | VARCHAR(10) UNIQUE | 球房ID(格式:2字母+3数字,如 AB123) |
| site_id | BIGINT UNIQUE | 门店 ID(对应 ETL 库的 site_id) |
| site_name | VARCHAR(100) | 门店名称 |
| tenant_id | INT | 租户 ID |

### auth.roles — 角色定义

| 字段 | 类型 | 说明 |
|------|------|------|
| id | SERIAL PK | 自增主键 |
| code | VARCHAR(50) UNIQUE | 角色编码 |
| name | VARCHAR(100) | 角色名称 |
| description | TEXT | 描述 |

预置角色:

| code | name | 权限 |
|------|------|------|
| `coach` | 助教 | view_tasks, view_board_coach |
| `staff` | 员工 | view_tasks, view_board |
| `site_admin` | 店铺管理员 | 全部 5 个权限 |
| `tenant_admin` | 租户管理员 | 全部 5 个权限 |

### auth.permissions — 权限定义

| 字段 | 类型 | 说明 |
|------|------|------|
| id | SERIAL PK | 自增主键 |
| code | VARCHAR(50) UNIQUE | 权限编码 |
| name | VARCHAR(100) | 权限名称 |
| description | TEXT | 描述 |

预置权限:

| code | name |
|------|------|
| `view_tasks` | 查看任务 |
| `view_board` | 查看看板 |
| `view_board_finance` | 查看财务看板 |
| `view_board_customer` | 查看客户看板 |
| `view_board_coach` | 查看助教看板 |

### auth.role_permissions — 角色-权限关联

联合主键 `(role_id, permission_id)`,外键级联删除。

### auth.user_applications — 用户入驻申请

| 字段 | 类型 | 说明 |
|------|------|------|
| id | SERIAL PK | 自增主键 |
| user_id | INT FK | 关联 auth.users |
| site_code | VARCHAR(10) | 球房ID |
| site_id | BIGINT | 门店 ID(后端自动填充) |
| applied_role_text | VARCHAR(100) | 申请身份文本 |
| employee_number | VARCHAR(50) | 员工编号 |
| phone | VARCHAR(20) | 手机号 |
| status | VARCHAR(20) | pending / approved / rejected |
| reviewer_id | INT | 审核人 |
| review_note | TEXT | 审核备注 |
| created_at | TIMESTAMPTZ | 申请时间 |
| reviewed_at | TIMESTAMPTZ | 审核时间 |

### auth.user_site_roles — 用户-门店-角色关联

| 字段 | 类型 | 说明 |
|------|------|------|
| id | SERIAL PK | 自增主键 |
| user_id | INT FK | 关联 auth.users |
| site_id | BIGINT | 门店 ID |
| role_id | INT FK | 关联 auth.roles |

唯一约束:`(user_id, site_id, role_id)` — 同一用户在同一门店下不能重复分配同一角色。

### auth.user_assistant_binding — 用户-人员绑定

| 字段 | 类型 | 说明 |
|------|------|------|
| id | SERIAL PK | 自增主键 |
| user_id | INT FK | 关联 auth.users |
| site_id | BIGINT | 门店 ID |
| assistant_id | BIGINT | 助教 ID(ETL 库) |
| staff_id | BIGINT | 员工 ID(ETL 库) |
| binding_type | VARCHAR(20) | 绑定类型 |

## public Schema — 管理后台

### admin_users — 管理后台用户

用于管理后台(`apps/admin-web/`)的用户名密码登录。

默认种子数据:`admin / admin123`(生产环境部署后务必修改)。

### task_queue — 任务执行队列

存储待执行和执行中的 ETL 任务,按 `site_id` 隔离。

### scheduled_tasks — 调度任务配置

存储定时调度规则,由后端 `Scheduler` 服务消费。

### task_execution_log — 任务执行日志

记录每次 ETL 任务执行的状态、耗时、日志输出。

## FDW 跨库访问

`zqyy_app` 通过 Foreign Data Wrapper(FDW)只读访问 `etl_feiqiu` 数据库:

- 迁移脚本:`2026-02-24__p1_setup_fdw_etl.sql`
- 用途:小程序认证时的人员匹配(查询 ETL 库中的助教/员工记录)
- 安全:FDW 连接使用只读用户

## 目录结构

```
db/zqyy_app/
├── migrations/                                       # 迁移脚本(日期前缀)
│   ├── 2026-02-24__p1_create_auth_biz_schemas.sql    # 创建 auth + biz Schema
│   ├── 2026-02-24__p1_setup_fdw_etl.sql              # 设置 FDW 跨库访问
│   ├── 2026-02-25__p3_create_auth_tables.sql         # 创建 auth 8 张表
│   └── 2026-02-25__p3_seed_roles_permissions.sql     # 预置角色和权限
├── schemas/                                          # Schema DDL(待补充)
├── seeds/
│   └── admin_web_seed.sql                            # 管理后台默认管理员
├── scripts/
│   └── create_test_db.sql                            # 创建测试库脚本
└── README.md
```

## 迁移执行顺序

1. `p1_create_auth_biz_schemas.sql` — 创建 Schema
2. `p1_setup_fdw_etl.sql` — 设置 FDW
3. `p3_create_auth_tables.sql` — 创建认证表
4. `p3_seed_roles_permissions.sql` — 插入种子数据

所有迁移脚本使用 `IF NOT EXISTS` / `ON CONFLICT DO NOTHING` 幂等语法,可重复执行。

## 与其他模块的关系

- `apps/backend/` — 通过 `get_connection()` 读写此库
- `apps/miniprogram/` — 通过后端 API 间接访问
- `apps/admin-web/` — 通过后端 API 间接访问
- `db/etl_feiqiu/` — 通过 FDW 被此库只读引用
@@ -0,0 +1,53 @@
|
||||
-- =============================================================================
-- Migration: create auth/biz schemas and app_user privilege configuration
-- Date: 2026-02-24
-- Target DB: test_zqyy_app (connected via APP_DB_DSN)
-- Purpose: create the auth (user authentication) and biz (business data)
--          schemas in the application database and grant the app_user role
--          full CRUD on both, including auto-grant for tables created later.
--          The public schema is deliberately untouched so the existing system
--          management tables remain unaffected.
-- Precondition: the app_user role has been pre-created by the DBA.
-- Requirements: 1.1, 1.2, 1.3, 1.4, 1.5, 1.6, 4.2, 4.3, 4.4, 4.5, 4.6
-- =============================================================================

-- ---------------------------------------------------------------------------
-- 1. Create the auth schema (users, permissions, mappings)
-- ---------------------------------------------------------------------------
CREATE SCHEMA IF NOT EXISTS auth;

-- ---------------------------------------------------------------------------
-- 2. Create the biz schema (business data)
-- ---------------------------------------------------------------------------
CREATE SCHEMA IF NOT EXISTS biz;

-- ---------------------------------------------------------------------------
-- 3. Grant app_user USAGE + CRUD on the auth schema
-- ---------------------------------------------------------------------------
GRANT USAGE ON SCHEMA auth TO app_user;
GRANT SELECT, INSERT, UPDATE, DELETE ON ALL TABLES IN SCHEMA auth TO app_user;

-- ---------------------------------------------------------------------------
-- 4. Grant app_user USAGE + CRUD on the biz schema
-- ---------------------------------------------------------------------------
GRANT USAGE ON SCHEMA biz TO app_user;
GRANT SELECT, INSERT, UPDATE, DELETE ON ALL TABLES IN SCHEMA biz TO app_user;

-- ---------------------------------------------------------------------------
-- 5. ALTER DEFAULT PRIVILEGES: auto-grant on tables created in the future
--    NOTE(review): default privileges only apply to tables created by the role
--    that runs this statement — confirm migrations always run as the table
--    owner, otherwise future tables will not be auto-granted.
-- ---------------------------------------------------------------------------
ALTER DEFAULT PRIVILEGES IN SCHEMA auth
    GRANT SELECT, INSERT, UPDATE, DELETE ON TABLES TO app_user;

ALTER DEFAULT PRIVILEGES IN SCHEMA biz
    GRANT SELECT, INSERT, UPDATE, DELETE ON TABLES TO app_user;

-- =============================================================================
-- Rollback (execute in reverse order)
-- =============================================================================
-- ALTER DEFAULT PRIVILEGES IN SCHEMA biz REVOKE SELECT, INSERT, UPDATE, DELETE ON TABLES FROM app_user;
-- ALTER DEFAULT PRIVILEGES IN SCHEMA auth REVOKE SELECT, INSERT, UPDATE, DELETE ON TABLES FROM app_user;
-- REVOKE SELECT, INSERT, UPDATE, DELETE ON ALL TABLES IN SCHEMA biz FROM app_user;
-- REVOKE USAGE ON SCHEMA biz FROM app_user;
-- REVOKE SELECT, INSERT, UPDATE, DELETE ON ALL TABLES IN SCHEMA auth FROM app_user;
-- REVOKE USAGE ON SCHEMA auth FROM app_user;
-- DROP SCHEMA IF EXISTS biz CASCADE;
-- DROP SCHEMA IF EXISTS auth CASCADE;
71
db/zqyy_app/migrations/2026-02-24__p1_setup_fdw_etl.sql
Normal file
71
db/zqyy_app/migrations/2026-02-24__p1_setup_fdw_etl.sql
Normal file
@@ -0,0 +1,71 @@
|
||||
-- =============================================================================
-- Migration: configure FDW cross-database mapping (ETL DB → app DB)
-- Date: 2026-02-24
-- Target DB: test_zqyy_app (connected via APP_DB_DSN)
-- Purpose: use postgres_fdw to map the RLS views in the ETL database's app
--          schema into this database's fdw_etl schema as read-only foreign
--          tables, so the backend can read summary/dimension data without a
--          direct connection to the ETL database.
-- Preconditions:
--   1. The ETL DB has the app schema and its RLS views deployed
--      (2026-02-24__p1_create_app_schema_rls_views.sql)
--   2. The ETL DB has the read-only role app_reader
--   3. This DB has the app_user role
-- Requirements: 3.1, 3.2, 3.3, 3.4, 3.7, 4.2, 4.3, 4.4, 4.5, 4.6
-- =============================================================================

-- ---------------------------------------------------------------------------
-- 1. Install the postgres_fdw extension
-- ---------------------------------------------------------------------------
CREATE EXTENSION IF NOT EXISTS postgres_fdw;

-- ---------------------------------------------------------------------------
-- 2. Create the foreign server (pointing at the ETL database)
--    host / dbname / port use the placeholder '***'; replace per environment.
--    The server name is environment-neutral (no env prefix); environments are
--    distinguished only by the connection options.
-- ---------------------------------------------------------------------------
CREATE SERVER IF NOT EXISTS etl_feiqiu_server
    FOREIGN DATA WRAPPER postgres_fdw
    OPTIONS (host '***', dbname '***', port '***');

-- ---------------------------------------------------------------------------
-- 3. Create the user mapping (read-only role)
--    app_user   = application connection role on the test_zqyy_app side
--    app_reader = read-only role on the ETL side
--    The password placeholder '***' is replaced with real credentials at
--    deployment time.
-- ---------------------------------------------------------------------------
CREATE USER MAPPING IF NOT EXISTS FOR app_user
    SERVER etl_feiqiu_server
    OPTIONS (user 'app_reader', password '***');

-- ---------------------------------------------------------------------------
-- 4. Create the fdw_etl schema (idempotency: DROP first, then recreate)
--    IMPORT FOREIGN SCHEMA is not idempotent (it errors if a foreign table
--    already exists), so DROP CASCADE + recreate keeps the script re-runnable.
-- ---------------------------------------------------------------------------
DROP SCHEMA IF EXISTS fdw_etl CASCADE;
CREATE SCHEMA fdw_etl;

-- ---------------------------------------------------------------------------
-- 5. Bulk-import all foreign tables from the ETL DB's app schema into fdw_etl
-- ---------------------------------------------------------------------------
IMPORT FOREIGN SCHEMA app
    FROM SERVER etl_feiqiu_server
    INTO fdw_etl;

-- ---------------------------------------------------------------------------
-- 6. Grants: allow app_user to access the fdw_etl schema and its foreign tables
-- ---------------------------------------------------------------------------
GRANT USAGE ON SCHEMA fdw_etl TO app_user;
GRANT SELECT ON ALL TABLES IN SCHEMA fdw_etl TO app_user;

-- Auto-grant SELECT on foreign tables imported in the future.
ALTER DEFAULT PRIVILEGES IN SSCHEMA fdw_etl GRANT SELECT ON TABLES TO app_user;

-- =============================================================================
-- Rollback (execute in reverse order)
-- =============================================================================
-- ALTER DEFAULT PRIVILEGES IN SCHEMA fdw_etl REVOKE SELECT ON TABLES FROM app_user;
-- REVOKE SELECT ON ALL TABLES IN SCHEMA fdw_etl FROM app_user;
-- REVOKE USAGE ON SCHEMA fdw_etl FROM app_user;
-- DROP SCHEMA IF EXISTS fdw_etl CASCADE;
-- DROP USER MAPPING IF EXISTS FOR app_user SERVER etl_feiqiu_server;
-- DROP SERVER IF EXISTS etl_feiqiu_server CASCADE;
-- DROP EXTENSION IF EXISTS postgres_fdw;
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user