开发机迁移

This commit is contained in:
Neo
2026-04-10 06:24:13 +08:00
parent f65c1d038b
commit 79d3c2e97e
50 changed files with 1565 additions and 318 deletions

View File

@@ -0,0 +1,26 @@
#!/usr/bin/env python3
"""PostToolUse hook: after a SQL file under db/ is edited, remind to sync docs/database/."""
import json
import re
import sys


def project_relative(file_path):
    """Strip everything up to and including the NeoZQYY project root and normalize to forward slashes."""
    return re.sub(r"^.*?NeoZQYY[/\\]", "", file_path.replace("\\", "/"))


def build_reminder(file_path):
    """Return the hook output dict when *file_path* is a SQL file under db/, else None.

    Matches schemas, migrations, ad-hoc scripts — anything ending in .sql below db/.
    """
    if not file_path:
        return None
    rel = project_relative(file_path)
    if not re.search(r"^db/.*\.sql$", rel):
        return None
    return {
        "hookSpecificOutput": {
            "hookEventName": "PostToolUse",
            # " — " separator and closing paren added: the original two string
            # fragments concatenated with no delimiter and left the paren open.
            "additionalContext": (
                f"[db-doc-sync] 已编辑数据库文件: {rel} — "
                "根据 Schema 变更规则,完成后须同步更新 docs/database/(变更说明、兼容性、回滚策略、验证 SQL)"
            ),
        }
    }


def main():
    """Read the hook payload from stdin and print a reminder when applicable."""
    try:
        data = json.load(sys.stdin)
    except Exception:
        # Malformed/absent payload: stay silent, never block the tool chain.
        return
    out = build_reminder((data.get("tool_input") or {}).get("file_path", ""))
    if out is not None:
        print(json.dumps(out))


if __name__ == "__main__":
    main()

View File

@@ -0,0 +1,37 @@
#!/usr/bin/env python3
"""PostToolUse hook: remind about the dual-schema rule when an edited SQL file contains CREATE VIEW."""
import json
import re
import sys

# CREATE [OR REPLACE] VIEW targeting the dws/dwd/core schemas (case-insensitive).
_VIEW_RE = re.compile(r"CREATE\s+(OR\s+REPLACE\s+)?VIEW\s+(dws|dwd|core)\.", re.IGNORECASE)


def project_relative(file_path):
    """Strip everything up to and including the NeoZQYY project root and normalize to forward slashes."""
    return re.sub(r"^.*?NeoZQYY[/\\]", "", file_path.replace("\\", "/"))


def build_warning(rel, content):
    """Return the hook output dict when *content* defines a dws/dwd/core VIEW, else None.

    rel: project-relative path, used only in the warning text.
    content: full text of the edited SQL file.
    """
    if not _VIEW_RE.search(content):
        return None
    return {
        "hookSpecificOutput": {
            "hookEventName": "PostToolUse",
            "additionalContext": (
                f"[rls-dual-schema] 检测到 {rel} 中包含 dws/dwd/core schema 的 VIEW 定义 — "
                "根据 RLS 视图双 Schema 规则,必须同时在原 schema 和 app schema 创建对应视图,否则后端查询会失败。"
            ),
        }
    }


def main():
    """Read the hook payload from stdin; only inspect SQL files under db/."""
    try:
        data = json.load(sys.stdin)
    except Exception:
        return
    fp = (data.get("tool_input") or {}).get("file_path", "")
    if not fp:
        return
    rel = project_relative(fp)
    if not re.search(r"^db/.*\.sql$", rel):
        return
    # Read the edited file to check whether it actually defines a VIEW.
    try:
        with open(fp, "r", encoding="utf-8") as f:
            content = f.read()
    except Exception:
        # Unreadable file (deleted, locked, binary): stay silent.
        return
    out = build_warning(rel, content)
    if out is not None:
        print(json.dumps(out))


if __name__ == "__main__":
    main()

View File

@@ -0,0 +1,24 @@
#!/usr/bin/env python3
"""PreToolUse hook: protect the demo-miniprogram directory from deletion or being moved into _DEL/."""
import json
import re
import sys

# The protected directory; it serves as the UI style calibration baseline.
DEMO_DIR = "demo-miniprogram"


def build_block(tool, tool_input):
    """Return a block decision dict when a Bash command would delete the demo dir
    or move it into _DEL/, else None.

    Plain renames (mv/move to a non-_DEL destination) are allowed.
    NOTE(review): the "_DEL" substring check is case-sensitive while the verb
    check is not — a lowercase "_del" destination would slip through; confirm intent.
    """
    if tool != "Bash":
        return None
    cmd = tool_input.get("command", "")
    # Only commands that both mention the demo dir and use a delete/move verb are suspect.
    if DEMO_DIR not in cmd or not re.search(r"\b(rm|rmdir|del|move|mv)\b", cmd, re.IGNORECASE):
        return None
    # Hard deletes always block; moves block only when targeting _DEL.
    if re.search(r"\brm\b|\brmdir\b|\bdel\b", cmd, re.IGNORECASE) or "_DEL" in cmd:
        return {
            "decision": "block",
            "reason": f"[demo-protect] apps/{DEMO_DIR}/ 禁止删除或移入 _DEL/。该目录是 UI 样式标杆校对基准。",
        }
    return None


def main():
    """Read the hook payload from stdin and print a block decision when needed."""
    try:
        data = json.load(sys.stdin)
    except Exception:
        return
    out = build_block(data.get("tool_name", ""), data.get("tool_input") or {})
    if out is not None:
        print(json.dumps(out))


if __name__ == "__main__":
    main()

View File

@@ -0,0 +1,28 @@
#!/usr/bin/env python3
"""PreToolUse hook: block access to files under any _archived/ directory."""
import json
import re
import sys


def is_archived_access(tool, tool_input):
    """Return True when the tool call targets a path inside an _archived/ directory.

    Primary check: the tool-specific path field, normalized to forward slashes.
    Fallback: the raw (un-normalized) file_path — this also covers tools other
    than Read/Edit/Write/Glob that happen to carry a file_path.
    """
    path = ""
    if tool in ("Read", "Edit", "Write"):
        path = tool_input.get("file_path", "")
    elif tool == "Glob":
        path = tool_input.get("path", "")
    path = path.replace("\\", "/")
    if re.search(r"/_archived/|/_archived$|^_archived/", path):
        return True
    return bool(re.search(r"[/\\]_archived[/\\]", tool_input.get("file_path", "")))


def main():
    """Read the hook payload from stdin and print a block decision when needed."""
    try:
        data = json.load(sys.stdin)
    except Exception:
        return
    if is_archived_access(data.get("tool_name", ""), data.get("tool_input") or {}):
        print(json.dumps({
            "decision": "block",
            "reason": "[archived-block] _archived/ 目录内容已废弃,禁止读取或参考。请使用当前版本的文件。"
        }))


if __name__ == "__main__":
    main()

View File

@@ -22,8 +22,5 @@ except Exception:
high_risk = result.get("high_risk_files", []) high_risk = result.get("high_risk_files", [])
if result.get("audit_required", False) and len(high_risk) > 0: if result.get("audit_required", False) and len(high_risk) > 0:
print(json.dumps({ print(json.dumps({
"hookSpecificOutput": { "systemMessage": f"[audit-check] 当前有 {len(high_risk)} 个高风险文件变更未审计。建议执行 /audit。"
"hookEventName": "Stop",
"additionalContext": f"[audit-check] 当前有 {len(high_risk)} 个高风险文件变更未审计。建议执行 /audit。"
}
})) }))

View File

@@ -0,0 +1,81 @@
#!/usr/bin/env python3
"""Stop hook: warn when logic changes lack test changes, or schema DDL changes lack a migration."""
import json
import os
import re
import subprocess
import sys

# Paths whose changes count as "logic" and should therefore come with test changes.
LOGIC_PATTERNS = [
    r"^apps/etl/connectors/feiqiu/(tasks|loaders|scd|orchestration|config|database|models|quality)/",
    r"^apps/backend/app/(routers|services|auth|schemas)/",
    r"^packages/shared/",
]

_SCHEMA_RE = re.compile(r"^db/[^/]+/schemas/.*\.sql$")
_MIGRATION_RE = re.compile(r"^db/[^/]+/migrations/.*\.sql$")


def _changed_files(project_dir):
    """Union of unstaged and staged changed paths (git diff / git diff --cached)."""
    unstaged = subprocess.run(
        ["git", "diff", "--name-only"],
        capture_output=True, text=True, timeout=10, cwd=project_dir,
    )
    staged = subprocess.run(
        ["git", "diff", "--name-only", "--cached"],
        capture_output=True, text=True, timeout=10, cwd=project_dir,
    )
    # Filter blanks: joining the two outputs with "\n" can produce empty lines
    # (the original set could contain "" when both diffs were non-empty).
    lines = (unstaged.stdout + "\n" + staged.stdout).splitlines()
    return {ln for ln in lines if ln.strip()}


def logic_warning(changed):
    """Return a warning string when logic files changed without any test-file change, else None.

    Test presence is inferred indirectly: some changed path lives under a
    test/ or tests/ directory. The (^|/) anchor avoids substring false
    positives such as "contest/" (the old bare r"tests?/" matched those).
    """
    logic_files = [f for f in changed if any(re.search(p, f) for p in LOGIC_PATTERNS)]
    if not logic_files:
        return None
    if any(re.search(r"(^|/)tests?/", f) for f in changed):
        return None
    return (
        f"本次会话修改了 {len(logic_files)} 个逻辑文件但未发现测试文件变更,"
        "建议运行相关测试验证(单元/集成/lint)"
    )


def ddl_warning(changed, untracked):
    """Return a warning string when schemas/ DDL changed with no migration script, else None.

    untracked: extra candidate paths (e.g. git's untracked files under db/) that
    may contain a freshly created, not-yet-added migration.
    """
    schema_files = [f for f in changed if _SCHEMA_RE.search(f)]
    if not schema_files:
        return None
    if any(_MIGRATION_RE.search(f) for f in changed) or any(_MIGRATION_RE.search(f) for f in untracked):
        return None
    dbs = set()
    for f in schema_files:
        m = re.match(r"^db/([^/]+)/", f)
        if m:
            dbs.add(m.group(1))
    db_list = ", ".join(sorted(dbs))
    return (
        f"检测到 {db_list} 的 schemas/ DDL 已变更但无对应迁移脚本,"
        "建议在 db/*/migrations/ 创建增量迁移文件"
    )


def main():
    """Collect git change info and emit a systemMessage when verification looks missing."""
    project_dir = os.environ.get("CLAUDE_PROJECT_DIR", os.getcwd())
    try:
        changed = _changed_files(project_dir)
    except Exception:
        # git unavailable / timeout: never block the Stop event.
        return
    if not changed:
        return
    warnings = []
    w = logic_warning(changed)
    if w:
        warnings.append(w)
    # Only shell out for untracked files when a schema change makes it relevant.
    untracked = []
    if any(_SCHEMA_RE.search(f) for f in changed):
        try:
            r = subprocess.run(
                ["git", "ls-files", "--others", "--exclude-standard", "db/"],
                capture_output=True, text=True, timeout=10, cwd=project_dir,
            )
            untracked = r.stdout.strip().splitlines()
        except Exception:
            untracked = []
    w = ddl_warning(changed, untracked)
    if w:
        warnings.append(w)
    if warnings:
        print(json.dumps({"systemMessage": "[verify-check] " + " | ".join(warnings)}))


if __name__ == "__main__":
    main()

View File

@@ -32,7 +32,10 @@
"Bash(cp -r tmp _DEL/tmp_backup)", "Bash(cp -r tmp _DEL/tmp_backup)",
"Bash(*)", "Bash(*)",
"Bash(touch tmp/.gitkeep)", "Bash(touch tmp/.gitkeep)",
"Bash(ls -la c:/NeoZQYY/docs/audit/session_logs/_session_index*.json)" "Bash(ls -la c:/NeoZQYY/docs/audit/session_logs/_session_index*.json)",
"mcp__pg-etl-test__execute_sql",
"mcp__pg-app-test__execute_sql",
"mcp__pg-app-test__list_schemas"
], ],
"additionalDirectories": [ "additionalDirectories": [
"C:\\Users\\Administrator\\.claude", "C:\\Users\\Administrator\\.claude",
@@ -52,6 +55,28 @@
] ]
} }
], ],
"PreToolUse": [
{
"matcher": "Read|Edit|Write|Glob",
"hooks": [
{
"type": "command",
"command": "python \"$CLAUDE_PROJECT_DIR/.claude/hooks/pre_read_archived_block.py\"",
"timeout": 5
}
]
},
{
"matcher": "Bash|Edit|Write",
"hooks": [
{
"type": "command",
"command": "python \"$CLAUDE_PROJECT_DIR/.claude/hooks/pre_demo_protect.py\"",
"timeout": 5
}
]
}
],
"PostToolUse": [ "PostToolUse": [
{ {
"matcher": "Edit|Write", "matcher": "Edit|Write",
@@ -62,6 +87,26 @@
"timeout": 5 "timeout": 5
} }
] ]
},
{
"matcher": "Edit|Write",
"hooks": [
{
"type": "command",
"command": "python \"$CLAUDE_PROJECT_DIR/.claude/hooks/post_edit_db_doc_sync.py\"",
"timeout": 5
}
]
},
{
"matcher": "Edit|Write",
"hooks": [
{
"type": "command",
"command": "python \"$CLAUDE_PROJECT_DIR/.claude/hooks/post_edit_rls_dual_schema.py\"",
"timeout": 10
}
]
} }
], ],
"Stop": [ "Stop": [
@@ -73,6 +118,15 @@
"timeout": 15 "timeout": 15
} }
] ]
},
{
"hooks": [
{
"type": "command",
"command": "python \"$CLAUDE_PROJECT_DIR/.claude/hooks/stop_verify_check.py\"",
"timeout": 15
}
]
} }
] ]
} }

55
.env
View File

@@ -104,25 +104,37 @@ SYSTEM_LOG_ROOT=C:/NeoZQYY/export/SYSTEM/LOGS
# ------------------------------------------------------------------------------ # ------------------------------------------------------------------------------
# 后端结构化日志目录 # 后端结构化日志目录
BACKEND_LOG_ROOT=C:/NeoZQYY/export/BACKEND/LOGS BACKEND_LOG_ROOT=C:/NeoZQYY/export/BACKEND/LOGS
# 用户头像存储目录
AVATAR_EXPORT_PATH=C:/NeoZQYY/export/BACKEND/avatars
# ------------------------------------------------------------------------------ # ------------------------------------------------------------------------------
# 阿里云百炼 AI 配置 # DashScope AI 配置(百炼 Application API
# CHANGE 2026-02-23 | 从 PRD 文档迁移至 .env禁止在文档中明文存放 # CHANGE 2026-02-23 | 从 PRD 文档迁移至 .env禁止在文档中明文存放
# CHANGE P14 | BAILIAN_* → DASHSCOPE_*;移除 BASE_URL/MODELApplication API 不需要)
# ------------------------------------------------------------------------------ # ------------------------------------------------------------------------------
BAILIAN_API_KEY=sk-6def29cab3474cc797e52b82a46a5dba DASHSCOPE_API_KEY=sk-6def29cab3474cc797e52b82a46a5dba
BAILIAN_MODEL=qwen-plus DASHSCOPE_WORKSPACE_ID=
BAILIAN_BASE_URL=https://dashscope.aliyuncs.com/compatible-mode/v1
BAILIAN_TEST_APP_ID=541edb3d5fcd4c18b13cbad81bb5fb9d
# CHANGE 2026-03-05 | 8 个百炼 AI 应用 ID从百炼平台获取2026-03-05 更新 # 8 个百炼 AI 应用 ID从百炼平台获取通过 app_id 指定应用
BAILIAN_APP_ID_1_CHAT=979dabe6f22a43989632b8c662cac97c # 应用 1通用对话 | 应用 2财务洞察 | 应用 3客户数据维客线索分析
BAILIAN_APP_ID_2_FINANCE=1dcdb5f39c3040b6af8ef79215b9b051 # 应用 4关系分析/任务建议 | 应用 5话术参考 | 应用 6备注分析
BAILIAN_APP_ID_3_CLUE=708bf45439cd48c7ab9a514d03482890 # 应用 7客户分析 | 应用 8维客线索整理
BAILIAN_APP_ID_4_ANALYSIS=ea7b1c374f574b9a925a2fb5789a9b90 DASHSCOPE_APP_ID_1_CHAT=979dabe6f22a43989632b8c662cac97c
BAILIAN_APP_ID_5_TACTICS=46f54e6053df4bb0b83be29366025cf6 DASHSCOPE_APP_ID_2_FINANCE=1dcdb5f39c3040b6af8ef79215b9b051
BAILIAN_APP_ID_6_NOTE=025bb344146b4e4e8be30c444adab3b4 DASHSCOPE_APP_ID_3_CLUE=708bf45439cd48c7ab9a514d03482890
BAILIAN_APP_ID_7_CUSTOMER=df35e06991b24d49971c03c6428a9c87 DASHSCOPE_APP_ID_4_ANALYSIS=ea7b1c374f574b9a925a2fb5789a9b90
BAILIAN_APP_ID_8_CONSOLIDATE=407dfb89283b4196934eec5fefe3ebc2 DASHSCOPE_APP_ID_5_TACTICS=46f54e6053df4bb0b83be29366025cf6
DASHSCOPE_APP_ID_6_NOTE=025bb344146b4e4e8be30c444adab3b4
DASHSCOPE_APP_ID_7_CUSTOMER=df35e06991b24d49971c03c6428a9c87
DASHSCOPE_APP_ID_8_CONSOLIDATE=407dfb89283b4196934eec5fefe3ebc2
# 应用 9Session 日志摘要生成Kiro agent_on_stop + batch_generate_summaries 使用)
DASHSCOPE_APP_ID_SUMMARY=e0cf8913b1ee4a4eb9464cc1ee0bf300
# 内部 API 认证 tokenETL 等内部服务调用 /api/internal/* 端点时使用)
INTERNAL_API_TOKEN=C4Rs45fEoMC3u2PR4-jvakl8SBYpU9kV7JFiTj-TJAc
# 后端 API 地址ETL 触发 AI 事件时使用,如 http://localhost:8000
BACKEND_API_URL=http://localhost:8000
# ------------------------------------------------------------------------------ # ------------------------------------------------------------------------------
# 微信小程序 # 微信小程序
@@ -147,3 +159,18 @@ PIPELINE_RATE_MAX=2.0
OPS_SERVER_BASE=C:/NeoZQYY OPS_SERVER_BASE=C:/NeoZQYY
ETL_PROJECT_PATH=C:/NeoZQYY/apps/etl/connectors/feiqiu ETL_PROJECT_PATH=C:/NeoZQYY/apps/etl/connectors/feiqiu
ETL_PYTHON_EXECUTABLE=C:/NeoZQYY/.venv/Scripts/python.exe ETL_PYTHON_EXECUTABLE=C:/NeoZQYY/.venv/Scripts/python.exe
# === Dev Trace Log ===
# 全链路请求追踪日志(仅开发/测试环境使用,生产环境关闭)
DEV_TRACE_ENABLED=true
DEV_TRACE_LOG_DIR=export/dev-trace-logs
DEV_TRACE_LOG_RETENTION_DAYS=7
DEV_TRACE_LOG_SQL=true
DEV_TRACE_LOG_PARAMS=true
# ------------------------------------------------------------------------------
# DWS 工资计算配置
# CHANGE 2026-03-27 | 允许非月初结算期运行工资计算任务(临时开关)
# 正常调度只在月初 1-5 号跑上月工资,此开关允许月中手动跑当月工资
# ------------------------------------------------------------------------------
DWS_SALARY_ALLOW_OUT_OF_CYCLE=true

View File

@@ -33,18 +33,18 @@
"disabled": false "disabled": false
}, },
"weixin-devtools-mcp": { "weixin-devtools-mcp": {
"command": "npx", "command": "cmd",
"args": ["-y", "weixin-devtools-mcp", "--tools-profile=full", "--ws-endpoint=ws://127.0.0.1:9420"], "args": ["/c", "npx", "-y", "weixin-devtools-mcp", "--tools-profile=full", "--ws-endpoint=ws://127.0.0.1:9420"],
"env": { "env": {
"WECHAT_DEVTOOLS_CLI": "C:\\dev\\WechatDevtools\\cli.bat", "WECHAT_DEVTOOLS_CLI": "C:\\dev\\WechatDevtools\\cli.bat",
"WECHAT_DEVTOOLS_PROJECT": "C:\\NeoZQYY\\apps\\miniprogram" "WECHAT_DEVTOOLS_PROJECT": "C:\\NeoZQYY\\apps\\miniprogram"
}, },
"disabled": true "disabled": false
}, },
"playwright": { "playwright": {
"command": "npx", "command": "cmd",
"args": ["@playwright/mcp@latest"], "args": ["/c", "npx", "@playwright/mcp@latest"],
"disabled": true "disabled": false
}, },
"openapi": { "openapi": {
"command": "uv", "command": "uv",
@@ -61,7 +61,7 @@
"env": { "env": {
"PYTHONUTF8": "1" "PYTHONUTF8": "1"
}, },
"disabled": true "disabled": false
} }
} }
} }

View File

@@ -156,6 +156,15 @@ cd apps/miniprogram && npm test # 小程序 Jest
例外:纯格式调整、注释/文档纯文字修改、用户说"直接改/跳过调研"、新建文件且不涉及已有逻辑 例外:纯格式调整、注释/文档纯文字修改、用户说"直接改/跳过调研"、新建文件且不涉及已有逻辑
## 改动后验证(强制)
逻辑改动完成后,必须执行以下验证步骤:
1. 运行相关测试(单元/集成/lint如无法运行需说明原因
2. 输出 diff 摘要(改了哪些文件、每个文件改动要点)
3. 列出未覆盖的风险点(未测试的路径、可能的副作用、需要人工验证的场景)
例外:纯格式/文档/注释调整、用户说"跳过验证"
## 数据库 Schema 变更规则 ## 数据库 Schema 变更规则
修改任何影响 PostgreSQL schema 的内容(迁移/DDL/表定义/ORM 模型)时,必须同步更新 `docs/database/` 修改任何影响 PostgreSQL schema 的内容(迁移/DDL/表定义/ORM 模型)时,必须同步更新 `docs/database/`

View File

@@ -31,6 +31,31 @@ from app.config import (
ETL_DB_USER, ETL_DB_USER,
) )
# TCP keepalive 参数:防止长期运行的后台服务连接被网络设备/PostgreSQL 回收
_KEEPALIVE_KWARGS = {
"keepalives": 1,
"keepalives_idle": 60, # 空闲 60 秒后开始探测
"keepalives_interval": 10, # 每 10 秒探测一次
"keepalives_count": 3, # 连续 3 次失败判定断开
}
# 连接重试参数:应对 PostgreSQL 瞬时不可用Tailscale 网络抖动等)
_CONNECT_MAX_RETRIES = 3
_CONNECT_RETRY_DELAY = 1.0 # 秒
def _connect_with_retry(connect_fn, max_retries=_CONNECT_MAX_RETRIES):
"""带重试的数据库连接,应对服务端瞬时拒绝连接。"""
last_exc = None
for attempt in range(max_retries):
try:
return connect_fn()
except psycopg2.OperationalError as e:
last_exc = e
if attempt < max_retries - 1:
time.sleep(_CONNECT_RETRY_DELAY * (attempt + 1))
raise last_exc
def get_connection() -> PgConnection: def get_connection() -> PgConnection:
""" """
@@ -49,13 +74,14 @@ def get_connection() -> PgConnection:
start = time.perf_counter() if should_trace else 0.0 start = time.perf_counter() if should_trace else 0.0
conn = psycopg2.connect( conn = _connect_with_retry(lambda: psycopg2.connect(
host=DB_HOST, host=DB_HOST,
port=DB_PORT, port=DB_PORT,
user=DB_USER, user=DB_USER,
password=DB_PASSWORD, password=DB_PASSWORD,
dbname=APP_DB_NAME, dbname=APP_DB_NAME,
) **_KEEPALIVE_KWARGS,
))
if should_trace: if should_trace:
from datetime import datetime from datetime import datetime
@@ -86,13 +112,14 @@ def get_etl_global_readonly_connection() -> PgConnection:
用于系统管理后台等不需要门店隔离的场景(如 ETL 状态监控)。 用于系统管理后台等不需要门店隔离的场景(如 ETL 状态监控)。
""" """
conn = psycopg2.connect( conn = _connect_with_retry(lambda: psycopg2.connect(
host=ETL_DB_HOST, host=ETL_DB_HOST,
port=ETL_DB_PORT, port=ETL_DB_PORT,
user=ETL_DB_USER, user=ETL_DB_USER,
password=ETL_DB_PASSWORD, password=ETL_DB_PASSWORD,
dbname=ETL_DB_NAME, dbname=ETL_DB_NAME,
) **_KEEPALIVE_KWARGS,
))
try: try:
conn.autocommit = False conn.autocommit = False
with conn.cursor() as cur: with conn.cursor() as cur:
@@ -121,13 +148,14 @@ def get_etl_readonly_connection(site_id: int | str) -> PgConnection:
finally: finally:
conn.close() conn.close()
""" """
conn = psycopg2.connect( conn = _connect_with_retry(lambda: psycopg2.connect(
host=ETL_DB_HOST, host=ETL_DB_HOST,
port=ETL_DB_PORT, port=ETL_DB_PORT,
user=ETL_DB_USER, user=ETL_DB_USER,
password=ETL_DB_PASSWORD, password=ETL_DB_PASSWORD,
dbname=ETL_DB_NAME, dbname=ETL_DB_NAME,
) **_KEEPALIVE_KWARGS,
))
try: try:
conn.autocommit = False conn.autocommit = False
with conn.cursor() as cur: with conn.cursor() as cur:
@@ -142,3 +170,23 @@ def get_etl_readonly_connection(site_id: int | str) -> PgConnection:
conn.close() conn.close()
raise raise
return conn return conn
def get_etl_write_connection() -> PgConnection:
"""
获取 ETL 数据库etl_feiqiu的可写连接。
仅用于后端需要回写 ETL 汇总表的场景(如 task_generator 回写关系指数统计)。
不设置 RLS 隔离,调用方需在 SQL 中显式指定 site_id。
调用方负责关闭连接。
"""
conn = _connect_with_retry(lambda: psycopg2.connect(
host=ETL_DB_HOST,
port=ETL_DB_PORT,
user=ETL_DB_USER,
password=ETL_DB_PASSWORD,
dbname=ETL_DB_NAME,
**_KEEPALIVE_KWARGS,
))
conn.autocommit = False
return conn

View File

@@ -39,6 +39,61 @@ class RunJobByNameResponse(BaseModel):
job_name: str job_name: str
class EtlCompletedRequest(BaseModel):
"""ETL 完成通知请求体。"""
pipeline: str = Field(default="api_full", description="完成的 pipeline 名称")
site_id: int | None = Field(default=None, description="门店 ID可选")
class EtlCompletedResponse(BaseModel):
"""ETL 完成编排结果。"""
success: bool
recall_result: dict | None = None
task_gen_result: dict | None = None
message: str = ""
@router.post("/etl-completed", response_model=EtlCompletedResponse)
async def etl_completed_endpoint(
body: EtlCompletedRequest,
_token: str = Depends(verify_internal_token),
) -> EtlCompletedResponse:
"""ETL pipeline 完成后的统一编排入口。
CHANGE 2026-04-07 | Fix-12ETL 完成后自动触发。
编排顺序recall_detector.run() → task_generator.run()
"""
from app.services import recall_detector, task_generator
recall_result = None
task_gen_result = None
errors = []
# Step 1: 先检查召回完成(含回溯)
try:
recall_result = recall_detector.run()
logger.info("ETL 编排 Step1 recall_detector 完成: %s", recall_result)
except Exception:
logger.exception("ETL 编排 Step1 recall_detector 失败")
errors.append("recall_detector failed")
# Step 2: 再生成新任务
try:
task_gen_result = task_generator.run()
logger.info("ETL 编排 Step2 task_generator 完成: %s", task_gen_result)
except Exception:
logger.exception("ETL 编排 Step2 task_generator 失败")
errors.append("task_generator failed")
success = len(errors) == 0
return EtlCompletedResponse(
success=success,
recall_result=recall_result,
task_gen_result=task_gen_result,
message="; ".join(errors) if errors else "ok",
)
@router.post("/run-job", response_model=RunJobByNameResponse) @router.post("/run-job", response_model=RunJobByNameResponse)
async def run_job_by_name_endpoint( async def run_job_by_name_endpoint(
body: RunJobByNameRequest, body: RunJobByNameRequest,

View File

@@ -12,6 +12,7 @@
- POST /api/xcx/tasks/{id}/restore — 恢复任务 - POST /api/xcx/tasks/{id}/restore — 恢复任务
所有端点均需 JWTapproved 状态)。 所有端点均需 JWTapproved 状态)。
回访任务通过提交备注自动完成note_service不提供手动完成接口。
""" """
from __future__ import annotations from __future__ import annotations
@@ -20,6 +21,7 @@ from fastapi import APIRouter, Depends, Query
from app.auth.dependencies import CurrentUser from app.auth.dependencies import CurrentUser
from app.middleware.permission import require_approved, require_permission from app.middleware.permission import require_approved, require_permission
from app.schemas.xcx_tasks import ( from app.schemas.xcx_tasks import (
AbandonRequest, AbandonRequest,
TaskDetailResponse, TaskDetailResponse,

View File

@@ -375,9 +375,9 @@ async def get_coach_board(
"task_total": task_recall + task_callback, "task_total": task_recall + task_callback,
}) })
# 7. 排序 # 7. 排序id 作 tiebreaker 保证分页稳定)
sort_key, sort_desc = _SORT_KEY_MAP.get(sort, ("perf_hours", True)) sort_key, sort_desc = _SORT_KEY_MAP.get(sort, ("perf_hours", True))
items.sort(key=lambda x: x.get(sort_key, 0), reverse=sort_desc) items.sort(key=lambda x: (x.get(sort_key, 0), x.get("id", 0)), reverse=sort_desc)
# 移除内部排序字段 # 移除内部排序字段
for item in items: for item in items:
@@ -406,7 +406,9 @@ def _query_coach_tasks(
""" """
查询助教任务完成数BOARD-1 task 维度)。 查询助教任务完成数BOARD-1 task 维度)。
来源: biz.coach_tasks按 task_type 分类统计 recall/callback。 CHANGE 2026-04-08 | Fix-13 改造:
- recall: 广义召回数(从 biz.recall_events 统计,按天去重,不重复叠加)
- callback: 回访完成数(从 biz.coach_tasks 统计status='completed'
""" """
if not assistant_ids: if not assistant_ids:
return {} return {}
@@ -414,26 +416,42 @@ def _query_coach_tasks(
result: dict[int, dict] = {} result: dict[int, dict] = {}
try: try:
with conn.cursor() as cur: with conn.cursor() as cur:
# 广义召回数:从 recall_events 统计(天然去重)
cur.execute( cur.execute(
""" """
SELECT assistant_id, SELECT assistant_id, COUNT(*) AS recall_count
COUNT(*) FILTER (WHERE task_type IN ('high_priority_recall', 'priority_recall')) AS recall_count, FROM biz.recall_events
COUNT(*) FILTER (WHERE task_type = 'relationship_building') AS callback_count
FROM biz.coach_tasks
WHERE assistant_id = ANY(%s) WHERE assistant_id = ANY(%s)
AND site_id = %s AND site_id = %s
AND completed_at >= %s::date AND pay_time >= %s::date
AND completed_at <= %s::date AND pay_time < (%s::date + INTERVAL '1 day')
AND status = 'completed'
GROUP BY assistant_id GROUP BY assistant_id
""", """,
(assistant_ids, site_id, start_date, end_date), (assistant_ids, site_id, start_date, end_date),
) )
for row in cur.fetchall(): for row in cur.fetchall():
result[row[0]] = { result.setdefault(row[0], {"recall": 0, "callback": 0})
"recall": row[1] or 0, result[row[0]]["recall"] = row[1] or 0
"callback": row[2] or 0,
} # 回访完成数:从 coach_tasks 统计
cur.execute(
"""
SELECT assistant_id, COUNT(*) AS callback_count
FROM biz.coach_tasks
WHERE assistant_id = ANY(%s)
AND site_id = %s
AND completed_at >= %s::date
AND completed_at < (%s::date + INTERVAL '1 day')::timestamptz
AND status = 'completed'
AND task_type = 'follow_up_visit'
GROUP BY assistant_id
""",
(assistant_ids, site_id, start_date, end_date),
)
for row in cur.fetchall():
result.setdefault(row[0], {"recall": 0, "callback": 0})
result[row[0]]["callback"] = row[1] or 0
conn.commit() conn.commit()
except Exception: except Exception:
logger.warning("BOARD-1 任务查询失败,降级为空", exc_info=True) logger.warning("BOARD-1 任务查询失败,降级为空", exc_info=True)
@@ -762,10 +780,15 @@ def _build_overview(
) -> dict: ) -> dict:
"""经营一览板块。 """经营一览板块。
CHANGE 2026-04-01 | board-finance-dws-area-refactor 9.1 | CHANGE 2026-04-08 | P0 修复发生额失真 |
改为从 dws_finance_area_daily 按 area_code 查询(通过 get_finance_overview_area area=all 时回退到 dws_finance_daily_summaryget_finance_overview
仅 area≠all 时走 dws_finance_area_dailyget_finance_overview_area
原因area_daily 的 all 行只聚合有桌台映射的结算单,漏算无桌台单据约 12%
""" """
try: try:
if area == "all":
data = fdw_queries.get_finance_overview(conn, site_id, start, end)
else:
data = fdw_queries.get_finance_overview_area(conn, site_id, start, end, area) data = fdw_queries.get_finance_overview_area(conn, site_id, start, end, area)
except Exception: except Exception:
logger.warning("overview 查询失败,降级为空", exc_info=True) logger.warning("overview 查询失败,降级为空", exc_info=True)
@@ -775,6 +798,9 @@ def _build_overview(
if compare == 1 and prev_start and prev_end: if compare == 1 and prev_start and prev_end:
try: try:
if area == "all":
prev = fdw_queries.get_finance_overview(conn, site_id, prev_start, prev_end)
else:
prev = fdw_queries.get_finance_overview_area(conn, site_id, prev_start, prev_end, area) prev = fdw_queries.get_finance_overview_area(conn, site_id, prev_start, prev_end, area)
_attach_compare(result, data, prev, [ _attach_compare(result, data, prev, [
"occurrence", "discount", "discount_rate", "confirmed_revenue", "occurrence", "discount", "discount_rate", "confirmed_revenue",

View File

@@ -937,18 +937,18 @@ def get_consumption_60d(
""" """
查询客户近 60 天消费金额。 查询客户近 60 天消费金额。
来源: app.v_dwd_assistant_service_log 来源: app.v_dws_member_consumption_summaryDWS 预聚合表)
⚠️ DWD-DOC 规则 1: 使用 ledger_amountitems_sum 口径) 与 board-customer spend60 维度统一口径items_sum60天窗口日粒度
⚠️ 废单排除: is_delete = 0 取最新 stat_date 的快照行
""" """
with _fdw_context(conn, site_id, etl_conn=etl_conn) as cur: with _fdw_context(conn, site_id, etl_conn=etl_conn) as cur:
cur.execute( cur.execute(
""" """
SELECT COALESCE(SUM(ledger_amount), 0) SELECT consume_amount_60d
FROM app.v_dwd_assistant_service_log FROM app.v_dws_member_consumption_summary
WHERE tenant_member_id = %s WHERE member_id = %s
AND is_delete = 0 ORDER BY stat_date DESC
AND create_time >= (CURRENT_DATE - INTERVAL '60 days')::timestamptz LIMIT 1
""", """,
(member_id,), (member_id,),
) )
@@ -1729,6 +1729,8 @@ def get_coach_sv_data(
result: dict[int, dict] = {} result: dict[int, dict] = {}
with _fdw_context(conn, site_id) as cur: with _fdw_context(conn, site_id) as cur:
# CHANGE 2026-04-07 | Fix-6sv_consume 改为从结算表按 start_date/end_date 过滤,
# 使其随时间筛选联动,而非固定 60 天窗口。
cur.execute( cur.execute(
""" """
WITH coach_members AS ( WITH coach_members AS (
@@ -1737,11 +1739,21 @@ def get_coach_sv_data(
FROM app.v_dws_member_assistant_relation_index ri FROM app.v_dws_member_assistant_relation_index ri
WHERE ri.assistant_id = ANY(%s) WHERE ri.assistant_id = ANY(%s)
AND ri.session_count > 0 AND ri.session_count > 0
),
period_consume AS (
SELECT sh.member_id,
COALESCE(SUM(sh.items_sum), 0) AS consume_amount
FROM app.v_dwd_settlement_head sh
WHERE sh.member_id = ANY(SELECT member_id FROM coach_members)
AND sh.settle_type IN (1, 3)
AND sh.pay_time::date >= %s::date
AND sh.pay_time::date <= %s::date
GROUP BY sh.member_id
) )
SELECT cm.assistant_id, SELECT cm.assistant_id,
COALESCE(SUM(ca_agg.balance), 0) AS sv_amount, COALESCE(SUM(ca_agg.balance), 0) AS sv_amount,
COUNT(DISTINCT CASE WHEN ca_agg.balance > 0 THEN cm.member_id END) AS sv_customer_count, COUNT(DISTINCT CASE WHEN ca_agg.balance > 0 THEN cm.member_id END) AS sv_customer_count,
COALESCE(SUM(cs.consume_amount_60d), 0) AS sv_consume COALESCE(SUM(pc.consume_amount), 0) AS sv_consume
FROM coach_members cm FROM coach_members cm
LEFT JOIN ( LEFT JOIN (
SELECT tenant_member_id, SUM(balance) AS balance SELECT tenant_member_id, SUM(balance) AS balance
@@ -1749,11 +1761,11 @@ def get_coach_sv_data(
WHERE scd2_is_current = 1 WHERE scd2_is_current = 1
GROUP BY tenant_member_id GROUP BY tenant_member_id
) ca_agg ON cm.member_id = ca_agg.tenant_member_id ) ca_agg ON cm.member_id = ca_agg.tenant_member_id
LEFT JOIN app.v_dws_member_consumption_summary cs LEFT JOIN period_consume pc
ON cm.member_id = cs.member_id ON cm.member_id = pc.member_id
GROUP BY cm.assistant_id GROUP BY cm.assistant_id
""", """,
(assistant_ids,), (assistant_ids, start_date, end_date),
) )
for row in cur.fetchall(): for row in cur.fetchall():
result[row[0]] = { result[row[0]] = {
@@ -1769,19 +1781,20 @@ def get_coach_sv_data(
# --------------------------------------------------------------------------- # ---------------------------------------------------------------------------
def _project_filter_clause(project: str) -> tuple[str, tuple]: def _project_filter_clause(project: str, member_col: str = "member_id") -> tuple[str, tuple]:
""" """
生成项目筛选 SQL 片段(用于 BOARD-2 会员维度查询)。 生成项目筛选 SQL 片段(用于 BOARD-2 会员维度查询)。
CHANGE 2026-03-20 | R3 修复project 参数直接接收 category_code CHANGE 2026-03-20 | R3 修复project 参数直接接收 category_code
BILLIARD/SNOOKER/MAHJONG/KTV/ALL去掉 chinese→BILLIARD 映射层。 BILLIARD/SNOOKER/MAHJONG/KTV/ALL去掉 chinese→BILLIARD 映射层。
CHANGE 2026-04-07 | Fix-1member_col 参数化,修复 6 个维度别名不匹配导致 SQL 500。
返回 (sql_fragment, params)sql_fragment 以 AND 开头,可直接拼入 WHERE 子句。 返回 (sql_fragment, params)sql_fragment 以 AND 开头,可直接拼入 WHERE 子句。
""" """
_valid_categories = {"BILLIARD", "SNOOKER", "MAHJONG", "KTV"} _valid_categories = {"BILLIARD", "SNOOKER", "MAHJONG", "KTV"}
if project == "ALL" or project not in _valid_categories: if project == "ALL" or project not in _valid_categories:
return "", () return "", ()
clause = """ clause = f"""
AND vd.member_id IN ( AND {member_col} IN (
SELECT mpt.member_id SELECT mpt.member_id
FROM app.v_dws_member_project_tag mpt FROM app.v_dws_member_project_tag mpt
WHERE mpt.category_code = %s AND mpt.is_tagged = true WHERE mpt.category_code = %s AND mpt.is_tagged = true
@@ -1802,13 +1815,13 @@ def get_customer_board_recall(
ideal_days → ideal_interval_days, wbi_score → display_score, ideal_days → ideal_interval_days, wbi_score → display_score,
elapsed_days → CURRENT_DATE - last_visit_time::date (计算列), elapsed_days → CURRENT_DATE - last_visit_time::date (计算列),
overdue_days → elapsed_days - ideal_interval_days (计算列), overdue_days → elapsed_days - ideal_interval_days (计算列),
visits_30d 不存在(有 visits_14d/visits_60d),用 visits_14d 近似, CHANGE 2026-04-07 | Fix-3visits_30d 新增字段,替代 visits_14d 近似,
balance_amount → balance (v_dim_member_card_account) balance_amount → balance (v_dim_member_card_account)
⚠️ DQ-6: 客户姓名通过 member_id JOIN v_dim_member。 ⚠️ DQ-6: 客户姓名通过 member_id JOIN v_dim_member。
⚠️ DQ-7: 余额通过 JOIN v_dim_member_card_account。 ⚠️ DQ-7: 余额通过 JOIN v_dim_member_card_account。
按 display_score 降序LIMIT/OFFSET 分页。 按 display_score 降序LIMIT/OFFSET 分页。
""" """
proj_clause, proj_params = _project_filter_clause(project) proj_clause, proj_params = _project_filter_clause(project, "wi.member_id")
with _fdw_context(conn, site_id) as cur: with _fdw_context(conn, site_id) as cur:
# 总数 # 总数
@@ -1817,6 +1830,7 @@ def get_customer_board_recall(
SELECT COUNT(*) SELECT COUNT(*)
FROM app.v_dws_member_winback_index wi FROM app.v_dws_member_winback_index wi
WHERE 1=1 {proj_clause} WHERE 1=1 {proj_clause}
""", """,
proj_params, proj_params,
) )
@@ -1831,7 +1845,7 @@ def get_customer_board_recall(
wi.ideal_interval_days, wi.ideal_interval_days,
CURRENT_DATE - wi.last_visit_time::date AS elapsed_days, CURRENT_DATE - wi.last_visit_time::date AS elapsed_days,
(CURRENT_DATE - wi.last_visit_time::date) - COALESCE(wi.ideal_interval_days, 0) AS overdue_days, (CURRENT_DATE - wi.last_visit_time::date) - COALESCE(wi.ideal_interval_days, 0) AS overdue_days,
wi.visits_14d, wi.visits_30d,
wi.display_score, wi.display_score,
COALESCE(ca.balance, 0) AS balance COALESCE(ca.balance, 0) AS balance
FROM app.v_dws_member_winback_index wi FROM app.v_dws_member_winback_index wi
@@ -1844,7 +1858,7 @@ def get_customer_board_recall(
GROUP BY tenant_member_id GROUP BY tenant_member_id
) ca ON wi.member_id = ca.tenant_member_id ) ca ON wi.member_id = ca.tenant_member_id
WHERE 1=1 {proj_clause} WHERE 1=1 {proj_clause}
ORDER BY wi.display_score DESC ORDER BY wi.display_score DESC, wi.member_id
LIMIT %s OFFSET %s LIMIT %s OFFSET %s
""", """,
(*proj_params, page_size, offset), (*proj_params, page_size, offset),
@@ -1870,16 +1884,18 @@ def _derive_potential_tags(
level_score: float | None, level_score: float | None,
speed_score: float | None, speed_score: float | None,
stability_score: float | None, stability_score: float | None,
) -> list[str]: ) -> list[dict]:
"""从三维分数派生潜力标签display_score 为 0-100 区间)。""" """从三维分数派生潜力标签display_score 为 0-10 区间)。
tags = [] 返回 [{text, theme}] 格式,与前端 potentialTags 类型一致。
threshold = 60.0 """
tags: list[dict] = []
threshold = 6.0
if level_score is not None and float(level_score) >= threshold: if level_score is not None and float(level_score) >= threshold:
tags.append("high_level") tags.append({"text": "高消费力", "theme": "success"})
if speed_score is not None and float(speed_score) >= threshold: if speed_score is not None and float(speed_score) >= threshold:
tags.append("fast_growth") tags.append({"text": "快增长", "theme": "warning"})
if stability_score is not None and float(stability_score) >= threshold: if stability_score is not None and float(stability_score) >= threshold:
tags.append("stable") tags.append({"text": "稳定", "theme": "primary"})
return tags return tags
@@ -1934,7 +1950,7 @@ def get_customer_board_potential(
) ca_agg ON spi.member_id = ca_agg.tenant_member_id ) ca_agg ON spi.member_id = ca_agg.tenant_member_id
WHERE 1=1 WHERE 1=1
{f"AND spi.member_id IN (SELECT member_id FROM app.v_dws_member_project_tag WHERE category_code = %s AND is_tagged = true)" if proj_params else ""} {f"AND spi.member_id IN (SELECT member_id FROM app.v_dws_member_project_tag WHERE category_code = %s AND is_tagged = true)" if proj_params else ""}
ORDER BY spi.display_score DESC NULLS LAST, COALESCE(ca_agg.balance, 0) DESC ORDER BY spi.display_score DESC NULLS LAST, COALESCE(ca_agg.balance, 0) DESC, spi.member_id
LIMIT %s OFFSET %s LIMIT %s OFFSET %s
""", """,
(*proj_params, page_size, offset), (*proj_params, page_size, offset),
@@ -1969,7 +1985,7 @@ def get_customer_board_balance(
⚠️ DQ-7: 余额通过 tenant_member_id JOIN取 scd2_is_current=1。 ⚠️ DQ-7: 余额通过 tenant_member_id JOIN取 scd2_is_current=1。
按 balance 降序。 按 balance 降序。
""" """
proj_clause, proj_params = _project_filter_clause(project) proj_clause, proj_params = _project_filter_clause(project, "ca.tenant_member_id")
with _fdw_context(conn, site_id) as cur: with _fdw_context(conn, site_id) as cur:
# CHANGE 2026-03-28 | 修复客户重复dim_member_card_account 同一 member 有多条记录, # CHANGE 2026-03-28 | 修复客户重复dim_member_card_account 同一 member 有多条记录,
@@ -2009,9 +2025,14 @@ def get_customer_board_balance(
) ca_agg ) ca_agg
LEFT JOIN app.v_dim_member dm LEFT JOIN app.v_dim_member dm
ON ca_agg.member_id = dm.member_id AND dm.scd2_is_current = 1 ON ca_agg.member_id = dm.member_id AND dm.scd2_is_current = 1
LEFT JOIN app.v_dws_member_consumption_summary vd LEFT JOIN LATERAL (
ON ca_agg.member_id = vd.member_id SELECT cs.days_since_last, cs.consume_amount_60d
ORDER BY ca_agg.balance DESC FROM app.v_dws_member_consumption_summary cs
WHERE cs.member_id = ca_agg.member_id
ORDER BY cs.stat_date DESC
LIMIT 1
) vd ON true
ORDER BY ca_agg.balance DESC, ca_agg.member_id
LIMIT %s OFFSET %s LIMIT %s OFFSET %s
""", """,
(*proj_params, page_size, offset), (*proj_params, page_size, offset),
@@ -2026,9 +2047,10 @@ def get_customer_board_balance(
"last_visit": f"{row[3]}天前" if row[3] is not None else "--", "last_visit": f"{row[3]}天前" if row[3] is not None else "--",
"last_visit_date": row[3], "last_visit_date": row[3],
"ideal_days": None, # balance 维度无 ideal_days由 board_service 补充 "ideal_days": None, # balance 维度无 ideal_days由 board_service 补充
"monthly_consume": float(row[4]) if row[4] is not None else 0.0, # CHANGE 2026-04-07 | Fix-4consume_amount_60d 是 60 天总额,月均 = /2
"monthly_consume": float(row[4]) / 2 if row[4] is not None else 0.0,
"available_months": ( "available_months": (
f"{float(row[2]) / float(row[4]):.1f}个月" f"{2 * float(row[2]) / float(row[4]):.1f}个月"
if row[2] and row[4] and float(row[4]) > 0 if row[2] and row[4] and float(row[4]) > 0
else "--" else "--"
), ),
@@ -2050,7 +2072,7 @@ def get_customer_board_recharge(
balance_amount → balance (v_dim_member_card_account) balance_amount → balance (v_dim_member_card_account)
按 last_recharge_date (MAX(pay_time::date)) 降序。 按 last_recharge_date (MAX(pay_time::date)) 降序。
""" """
proj_clause, proj_params = _project_filter_clause(project) proj_clause, proj_params = _project_filter_clause(project, "ro.member_id")
with _fdw_context(conn, site_id) as cur: with _fdw_context(conn, site_id) as cur:
cur.execute( cur.execute(
@@ -2084,11 +2106,16 @@ def get_customer_board_recharge(
WHERE scd2_is_current = 1 WHERE scd2_is_current = 1
GROUP BY tenant_member_id GROUP BY tenant_member_id
) ca_agg ON ro.member_id = ca_agg.tenant_member_id ) ca_agg ON ro.member_id = ca_agg.tenant_member_id
LEFT JOIN app.v_dws_member_consumption_summary cs LEFT JOIN LATERAL (
ON ro.member_id = cs.member_id SELECT cs2.days_since_last
FROM app.v_dws_member_consumption_summary cs2
WHERE cs2.member_id = ro.member_id
ORDER BY cs2.stat_date DESC
LIMIT 1
) cs ON true
WHERE 1=1 {proj_clause} WHERE 1=1 {proj_clause}
GROUP BY ro.member_id, dm.nickname, ca_agg.balance, cs.days_since_last GROUP BY ro.member_id, dm.nickname, ca_agg.balance, cs.days_since_last
ORDER BY MAX(ro.pay_time::date) DESC ORDER BY MAX(ro.pay_time::date) DESC, ro.member_id
LIMIT %s OFFSET %s LIMIT %s OFFSET %s
""", """,
(*proj_params, page_size, offset), (*proj_params, page_size, offset),
@@ -2121,7 +2148,7 @@ def get_customer_board_recent(
不再硬编码为 0。来源: v_dws_member_visit_detail + v_dim_member + v_dws_member_winback_index。 不再硬编码为 0。来源: v_dws_member_visit_detail + v_dim_member + v_dws_member_winback_index。
按 last_visit_date 降序。 按 last_visit_date 降序。
""" """
proj_clause, proj_params = _project_filter_clause(project) proj_clause, proj_params = _project_filter_clause(project, "vd.member_id")
with _fdw_context(conn, site_id) as cur: with _fdw_context(conn, site_id) as cur:
cur.execute( cur.execute(
@@ -2161,7 +2188,7 @@ def get_customer_board_recent(
ON ma.member_id = dm.member_id AND dm.scd2_is_current = 1 ON ma.member_id = dm.member_id AND dm.scd2_is_current = 1
LEFT JOIN app.v_dws_member_winback_index wi LEFT JOIN app.v_dws_member_winback_index wi
ON ma.member_id = wi.member_id ON ma.member_id = wi.member_id
ORDER BY ma.last_visit_date DESC ORDER BY ma.last_visit_date DESC, ma.member_id
LIMIT %s OFFSET %s LIMIT %s OFFSET %s
""", """,
(*proj_params, page_size, offset), (*proj_params, page_size, offset),
@@ -2199,15 +2226,21 @@ def get_customer_board_spend60(
high_spend_tag/avg_spend 不存在,用 avg_ticket_amount 替代 avg_spend high_spend_tag/avg_spend 不存在,用 avg_ticket_amount 替代 avg_spend
high_spend_tag 通过阈值计算。 high_spend_tag 通过阈值计算。
按 consume_amount_60d 降序。 按 consume_amount_60d 降序。
CHANGE 2026-04-08 | Fixconsumption_summary 按 stat_date 有多行快照,
用 DISTINCT ON 取最新快照避免同一客户出现多次。
""" """
proj_clause, proj_params = _project_filter_clause(project) proj_clause, proj_params = _project_filter_clause(project, "cs.member_id")
with _fdw_context(conn, site_id) as cur: with _fdw_context(conn, site_id) as cur:
cur.execute( cur.execute(
f""" f"""
SELECT COUNT(*) SELECT COUNT(*)
FROM (
SELECT DISTINCT ON (cs.member_id) cs.member_id
FROM app.v_dws_member_consumption_summary cs FROM app.v_dws_member_consumption_summary cs
WHERE 1=1 {proj_clause} WHERE 1=1 {proj_clause}
ORDER BY cs.member_id, cs.stat_date DESC
) sub
""", """,
proj_params, proj_params,
) )
@@ -2216,16 +2249,23 @@ def get_customer_board_spend60(
offset = (page - 1) * page_size offset = (page - 1) * page_size
cur.execute( cur.execute(
f""" f"""
WITH latest_cs AS (
SELECT DISTINCT ON (cs.member_id)
cs.member_id, cs.consume_amount_60d,
cs.visit_count_60d, cs.avg_ticket_amount
FROM app.v_dws_member_consumption_summary cs
WHERE 1=1 {proj_clause}
ORDER BY cs.member_id, cs.stat_date DESC
)
SELECT cs.member_id, SELECT cs.member_id,
dm.nickname, dm.nickname,
cs.consume_amount_60d, cs.consume_amount_60d,
cs.visit_count_60d, cs.visit_count_60d,
cs.avg_ticket_amount cs.avg_ticket_amount
FROM app.v_dws_member_consumption_summary cs FROM latest_cs cs
LEFT JOIN app.v_dim_member dm LEFT JOIN app.v_dim_member dm
ON cs.member_id = dm.member_id AND dm.scd2_is_current = 1 ON cs.member_id = dm.member_id AND dm.scd2_is_current = 1
WHERE 1=1 {proj_clause} ORDER BY cs.consume_amount_60d DESC, cs.member_id
ORDER BY cs.consume_amount_60d DESC
LIMIT %s OFFSET %s LIMIT %s OFFSET %s
""", """,
(*proj_params, page_size, offset), (*proj_params, page_size, offset),
@@ -2256,15 +2296,20 @@ def get_customer_board_freq60(
CHANGE 2026-03-20 | 修正列名items_sum_60d → consume_amount_60d, CHANGE 2026-03-20 | 修正列名items_sum_60d → consume_amount_60d,
avg_interval_days 不存在,用 60/visit_count_60d 近似计算。 avg_interval_days 不存在,用 60/visit_count_60d 近似计算。
按 visit_count_60d 降序。 按 visit_count_60d 降序。
CHANGE 2026-04-08 | Fix同 spend60DISTINCT ON 取最新快照。
""" """
proj_clause, proj_params = _project_filter_clause(project) proj_clause, proj_params = _project_filter_clause(project, "cs.member_id")
with _fdw_context(conn, site_id) as cur: with _fdw_context(conn, site_id) as cur:
cur.execute( cur.execute(
f""" f"""
SELECT COUNT(*) SELECT COUNT(*)
FROM (
SELECT DISTINCT ON (cs.member_id) cs.member_id
FROM app.v_dws_member_consumption_summary cs FROM app.v_dws_member_consumption_summary cs
WHERE 1=1 {proj_clause} WHERE 1=1 {proj_clause}
ORDER BY cs.member_id, cs.stat_date DESC
) sub
""", """,
proj_params, proj_params,
) )
@@ -2273,15 +2318,21 @@ def get_customer_board_freq60(
offset = (page - 1) * page_size offset = (page - 1) * page_size
cur.execute( cur.execute(
f""" f"""
WITH latest_cs AS (
SELECT DISTINCT ON (cs.member_id)
cs.member_id, cs.visit_count_60d, cs.consume_amount_60d
FROM app.v_dws_member_consumption_summary cs
WHERE 1=1 {proj_clause}
ORDER BY cs.member_id, cs.stat_date DESC
)
SELECT cs.member_id, SELECT cs.member_id,
dm.nickname, dm.nickname,
cs.visit_count_60d, cs.visit_count_60d,
cs.consume_amount_60d cs.consume_amount_60d
FROM app.v_dws_member_consumption_summary cs FROM latest_cs cs
LEFT JOIN app.v_dim_member dm LEFT JOIN app.v_dim_member dm
ON cs.member_id = dm.member_id AND dm.scd2_is_current = 1 ON cs.member_id = dm.member_id AND dm.scd2_is_current = 1
WHERE 1=1 {proj_clause} ORDER BY cs.visit_count_60d DESC, cs.member_id
ORDER BY cs.visit_count_60d DESC
LIMIT %s OFFSET %s LIMIT %s OFFSET %s
""", """,
(*proj_params, page_size, offset), (*proj_params, page_size, offset),
@@ -2316,20 +2367,21 @@ def _get_weekly_visits_batch(cur: Any, member_ids: list[int]) -> dict[int, list[
""" """
批量查询客户最近 8 周的到店次数(用于 freq60 维度柱状图)。 批量查询客户最近 8 周的到店次数(用于 freq60 维度柱状图)。
来源: app.v_dwd_assistant_service_log,按 ISO 周分组。 CHANGE 2026-04-07 | Fix-5数据源从 v_dwd_assistant_service_log 改为
v_dwd_settlement_headsettle_type IN (1,3)),与汇总维度口径一致。
返回 {member_id: [{val: int, pct: int}, ...]},固定 8 个元素。 返回 {member_id: [{val: int, pct: int}, ...]},固定 8 个元素。
""" """
cur.execute( cur.execute(
""" """
WITH weekly AS ( WITH weekly AS (
SELECT tenant_member_id AS member_id, SELECT member_id,
DATE_TRUNC('week', create_time::date) AS week_start, DATE_TRUNC('week', pay_time::date) AS week_start,
COUNT(*) AS cnt COUNT(DISTINCT pay_time::date) AS cnt
FROM app.v_dwd_assistant_service_log FROM app.v_dwd_settlement_head
WHERE tenant_member_id = ANY(%s) WHERE member_id = ANY(%s)
AND is_delete = 0 AND settle_type IN (1, 3)
AND create_time >= CURRENT_DATE - INTERVAL '56 days' AND pay_time >= CURRENT_DATE - INTERVAL '56 days'
GROUP BY tenant_member_id, DATE_TRUNC('week', create_time::date) GROUP BY member_id, DATE_TRUNC('week', pay_time::date)
) )
SELECT member_id, week_start, cnt SELECT member_id, week_start, cnt
FROM weekly FROM weekly
@@ -2379,7 +2431,7 @@ def get_customer_board_loyal(
来源: app.v_dws_member_assistant_relation_index。 来源: app.v_dws_member_assistant_relation_index。
按 max_rs最高亲密度降序。 按 max_rs最高亲密度降序。
""" """
proj_clause, proj_params = _project_filter_clause(project) proj_clause, proj_params = _project_filter_clause(project, "ri.member_id")
with _fdw_context(conn, site_id) as cur: with _fdw_context(conn, site_id) as cur:
cur.execute( cur.execute(
@@ -2403,7 +2455,7 @@ def get_customer_board_loyal(
FROM app.v_dws_member_assistant_relation_index ri FROM app.v_dws_member_assistant_relation_index ri
WHERE 1=1 {proj_clause} WHERE 1=1 {proj_clause}
GROUP BY ri.member_id GROUP BY ri.member_id
ORDER BY MAX(ri.rs_display) DESC ORDER BY MAX(ri.rs_display) DESC, ri.member_id
LIMIT %s OFFSET %s LIMIT %s OFFSET %s
) )
SELECT mt.member_id, SELECT mt.member_id,
@@ -2417,7 +2469,7 @@ def get_customer_board_loyal(
ON mt.member_id = dm.member_id AND dm.scd2_is_current = 1 ON mt.member_id = dm.member_id AND dm.scd2_is_current = 1
LEFT JOIN app.v_dim_assistant da LEFT JOIN app.v_dim_assistant da
ON mt.top_assistant_id = da.assistant_id AND da.scd2_is_current = 1 ON mt.top_assistant_id = da.assistant_id AND da.scd2_is_current = 1
ORDER BY mt.max_rs DESC ORDER BY mt.max_rs DESC, mt.member_id
""", """,
(*proj_params, page_size, offset), (*proj_params, page_size, offset),
) )

View File

@@ -1,20 +1,22 @@
# AI_CHANGELOG
# - 2026-03-20 | Prompt: H2 FDW→直连ETL统一改造 | _process_site() 中 fdw_etl.v_dwd_assistant_service_log
# 改为直连 ETL 库查询 app.v_dwd_assistant_service_log。使用 fdw_queries._fdw_context()。
# -*- coding: utf-8 -*- # -*- coding: utf-8 -*-
""" """
召回完成检测器Recall Completion Detector 召回完成检测器Recall Completion Detector
ETL 数据更新后,直连 ETL 库读取助教服务记录, ETL 数据更新后,扫描所有 MAIN 关系对的结算记录,
匹配活跃任务标记 completed记录 completed_at 和 completed_task_type 快照, 记录广义召回事件recall_events匹配活跃任务标记 completed
触发 recall_completed 事件通知备注回溯重分类器 对所有到店的 MAIN 关联客户生成回访任务follow_up_visit
由 trigger_jobs 中的 recall_completion_check 配置驱动event: etl_data_updated 由 trigger_jobs 中的 recall_completion_check 配置驱动event: etl_data_updated
CHANGE 2026-04-08 | Fix-13 改造:
- 扫描范围从"有 active 任务的客户"扩大为"所有 os_label='MAIN' 的关联客户"
- 新增 recall_events 事件表记录广义召回(按天去重)
- 无 active 任务的客户到店也生成 follow_up_visit
""" """
import json import json
import logging import logging
from datetime import timedelta
from app.trace.decorators import trace_service from app.trace.decorators import trace_service
@@ -63,69 +65,37 @@ def run(payload: dict | None = None, job_id: int | None = None) -> dict:
""" """
召回完成检测主流程。 召回完成检测主流程。
1. 从 trigger_jobs 读取 last_run_at 作为增量过滤基准 CHANGE 2026-04-08 | Fix-13 改造:扫描所有 MAIN 关系对。
2. 获取所有 distinct site_id从 active 任务中) 1. 从 biz.sites 获取所有活跃门店
3. 对每个 site_idSET LOCAL app.current_site_id 后 2. 对每个 site_id通过 _fdw_context 扫描 MAIN 关系对的结算记录
通过 FDW 读取 v_dwd_assistant_service_log 中 service_time > last_run_at 的新增服务记录 3. 有结算 → 写 recall_events + 完成任务(如有)+ 生成回访
4. 对每条服务记录,查找 biz.coach_tasks 中匹配的
(site_id, assistant_id, member_id) 且 status='active' 的任务
5. 将匹配任务标记为 completed
- status = 'completed'
- completed_at = 服务时间
- completed_task_type = 当前 task_type快照
6. 记录 coach_task_history
7. 触发 fire_event('recall_completed', {site_id, assistant_id, member_id, service_time})
参数:
payload: 事件载荷event 触发时由 trigger_scheduler 传入)
job_id: 触发器 job ID由 trigger_scheduler 传入),用于在最终事务中
更新 last_run_at保证 handler 数据变更与 last_run_at 原子提交
返回: {"completed_count": int}
""" """
completed_count = 0 completed_count = 0
event_count = 0
conn = _get_connection() conn = _get_connection()
try: try:
# ── 1. 读取 last_run_at ── # ── 1. 从业务库获取所有活跃门店 ──
with conn.cursor() as cur: with conn.cursor() as cur:
cur.execute( cur.execute(
""" "SELECT site_id FROM biz.sites WHERE is_active = true"
SELECT last_run_at
FROM biz.trigger_jobs
WHERE job_name = 'recall_completion_check'
"""
)
row = cur.fetchone()
last_run_at = row[0] if row else None
conn.commit()
# ── 2. 获取所有有 active 任务的 distinct site_id ──
with conn.cursor() as cur:
cur.execute(
"""
SELECT DISTINCT site_id
FROM biz.coach_tasks
WHERE status = 'active'
"""
) )
site_ids = [r[0] for r in cur.fetchall()] site_ids = [r[0] for r in cur.fetchall()]
conn.commit() conn.commit()
# ── 3. 逐 site_id 读取新增服务记录 ── # ── 2. 逐 site_id 处理 ──
for site_id in site_ids: for site_id in site_ids:
try: try:
count = _process_site(conn, site_id, last_run_at) result = _process_site(conn, site_id)
completed_count += count completed_count += result["completed"]
event_count += result["events"]
except Exception: except Exception:
logger.exception( logger.exception(
"处理门店召回检测失败: site_id=%s", site_id "处理门店召回检测失败: site_id=%s", site_id
) )
conn.rollback() conn.rollback()
# ── 事务安全T5handler 成功后更新 last_run_at ── # ── 更新 last_run_at(兼容 trigger_scheduler 调度记录) ──
# job_id 由 trigger_scheduler 传入,在 handler 最终事务中更新
# handler 异常时此处不会执行异常向上传播last_run_at 不变
if job_id is not None: if job_id is not None:
from app.services.trigger_scheduler import update_job_last_run_at from app.services.trigger_scheduler import update_job_last_run_at
@@ -137,145 +107,196 @@ def run(payload: dict | None = None, job_id: int | None = None) -> dict:
finally: finally:
conn.close() conn.close()
logger.info("召回完成检测完成: completed_count=%d", completed_count) logger.info(
return {"completed_count": completed_count} "召回完成检测完成: completed_count=%d, event_count=%d",
completed_count, event_count,
)
return {"completed_count": completed_count, "event_count": event_count}
def _process_site(conn, site_id: int, last_run_at) -> int: def _process_site(conn, site_id: int) -> dict:
""" """
处理单个门店的召回完成检测。 处理单个门店的广义召回检测。
直连 ETL 库读取新增服务记录,匹配 active 任务并标记 completed。 CHANGE 2026-04-08 | Fix-13 改造:
返回本门店完成的任务数。 1. 从 ETL 查询所有 os_label='MAIN' 的 (assistant_id, member_id) 对
2. 批量查询这些客户的最新结算记录
3. 对每个有新结算的关系对:写 recall_events + 完成任务 + 生成回访
""" """
completed = 0 completed = 0
events = 0
# CHANGE 2026-03-20 | H2 FDW→直连ETL | fdw_etl.v_dwd_assistant_service_log → app.v_dwd_assistant_service_log
# intent: 修复 RLS 门店隔离失效postgres_fdw 不传递 GUC 参数)
# assumptions: _fdw_context 内部管理 ETL 连接conn 仅用于后续业务库操作
from app.services.fdw_queries import _fdw_context from app.services.fdw_queries import _fdw_context
# ── 1. 获取本门店所有 MAIN 关系对 ──
with _fdw_context(conn, site_id) as cur: with _fdw_context(conn, site_id) as cur:
if last_run_at is not None:
# 列名映射: FDW 外部表 assistant_id/member_id/service_time
# → RLS 视图 site_assistant_id/tenant_member_id/create_time
cur.execute( cur.execute(
""" """
SELECT DISTINCT site_assistant_id, tenant_member_id, create_time SELECT assistant_id, member_id
FROM app.v_dwd_assistant_service_log FROM app.v_dws_member_assistant_relation_index
WHERE create_time > %s WHERE os_label = 'MAIN'
ORDER BY create_time ASC
""",
(last_run_at,),
)
else:
# 首次运行,读取所有服务记录
cur.execute(
"""
SELECT DISTINCT site_assistant_id, tenant_member_id, create_time
FROM app.v_dwd_assistant_service_log
ORDER BY create_time ASC
""" """
) )
service_records = cur.fetchall() main_pairs = [(r[0], r[1]) for r in cur.fetchall()]
# ── 4-7. 逐条服务记录匹配并处理 ── if not main_pairs:
for assistant_id, member_id, service_time in service_records: return {"completed": 0, "events": 0}
# 散客过滤member_id ≤ 0 不参与任务系统)
if member_id is None or member_id <= 0: # ── 2. 批量查询这些客户的最新结算时间 ──
continue member_ids = list({mid for _, mid in main_pairs})
try: settlement_map: dict[tuple[int, int], object] = {} # (assistant_id, member_id) → latest_pay_time
count = _process_service_record(
conn, site_id, assistant_id, member_id, service_time with _fdw_context(conn, site_id) as cur:
cur.execute(
"""
SELECT sl.site_assistant_id AS assistant_id,
sh.member_id,
MAX(sh.pay_time) AS latest_pay_time
FROM app.v_dwd_settlement_head sh
JOIN app.v_dwd_assistant_service_log sl
ON sl.order_settle_id = sh.order_settle_id
AND sl.is_delete = 0
WHERE sh.member_id = ANY(%s)
AND sh.settle_type IN (1, 3)
GROUP BY sl.site_assistant_id, sh.member_id
""",
(member_ids,),
) )
completed += count for row in cur.fetchall():
settlement_map[(row[0], row[1])] = row[2]
# ── 3. 获取本门店所有 active 的召回/回访任务(用于匹配) ──
active_tasks_map: dict[tuple[int, int], list] = {} # (assistant_id, member_id) → [(id, task_type, created_at)]
with conn.cursor() as cur:
cur.execute(
"""
SELECT id, assistant_id, member_id, task_type, created_at
FROM biz.coach_tasks
WHERE site_id = %s
AND status = 'active'
AND task_type IN ('high_priority_recall', 'priority_recall', 'follow_up_visit')
""",
(site_id,),
)
for row in cur.fetchall():
key = (row[1], row[2])
active_tasks_map.setdefault(key, []).append(
{"id": row[0], "task_type": row[3], "created_at": row[4]}
)
conn.commit()
# ── 4. 逐关系对处理 ──
for assistant_id, member_id in main_pairs:
latest_pay = settlement_map.get((assistant_id, member_id))
if latest_pay is None:
continue
active_tasks = active_tasks_map.get((assistant_id, member_id), [])
try:
result = _process_pair(
conn, site_id, assistant_id, member_id,
latest_pay, active_tasks,
)
completed += result["completed"]
events += result["events"]
except Exception: except Exception:
logger.exception( logger.exception(
"处理服务记录失败: site_id=%s, assistant_id=%s, member_id=%s", "处理关系对失败: site_id=%s, assistant_id=%s, member_id=%s",
site_id, site_id, assistant_id, member_id,
assistant_id,
member_id,
) )
conn.rollback() conn.rollback()
return completed return {"completed": completed, "events": events}
def _process_service_record( def _process_pair(
conn, conn,
site_id: int, site_id: int,
assistant_id: int, assistant_id: int,
member_id: int, member_id: int,
service_time, latest_pay_time,
) -> int: active_tasks: list[dict],
) -> dict:
""" """
处理单条服务记录:匹配 active 任务并标记 completed + 生成回访任务 处理单个 MAIN 关系对的召回检测
CHANGE 2026-03-30 | 回访任务直接在此生成(不再依赖 note_reclassifier 事件链)。 CHANGE 2026-04-08 | Fix-13 改造:
规则: - 写 recall_eventsON CONFLICT DO NOTHING 按天去重)
- 有 active 召回任务 → 标记 completed然后生成回访任务 - 有 active 召回任务且 pay_time > created_at → 完成任务
- 有 active 回访任务 → 关闭旧回访,生成新回访(重置 48h 倒计时 - 关闭旧回访 → 新建回访48h
- 无任何 active 召回/回访 → 直接生成回访任务 - 无 active 任务也生成回访
每条服务记录独立事务,失败不影响其他。 返回: {"completed": int, "events": int}
返回本次完成的任务数。
""" """
completed = 0 completed = 0
events = 0
with conn.cursor() as cur: with conn.cursor() as cur:
cur.execute("BEGIN") cur.execute("BEGIN")
# ── 1. 查找匹配的 active 召回类任务 ── # ── 1. 写 recall_events按天去重 ──
# 先查是否有匹配的召回任务(用于填充 task_id/task_type
recall_tasks = [
t for t in active_tasks
if t["task_type"] in ("high_priority_recall", "priority_recall")
and latest_pay_time > t["created_at"]
]
event_task_id = recall_tasks[0]["id"] if recall_tasks else None
event_task_type = recall_tasks[0]["task_type"] if recall_tasks else None
cur.execute( cur.execute(
""" """
SELECT id, task_type INSERT INTO biz.recall_events
FROM biz.coach_tasks (site_id, assistant_id, member_id, pay_time, task_id, task_type)
WHERE site_id = %s VALUES (%s, %s, %s, %s, %s, %s)
AND assistant_id = %s ON CONFLICT (site_id, assistant_id, member_id, (date_trunc('day', pay_time AT TIME ZONE 'Asia/Shanghai')))
AND member_id = %s DO NOTHING
AND status = 'active' RETURNING id
AND task_type IN ('high_priority_recall', 'priority_recall')
""", """,
(site_id, assistant_id, member_id), (site_id, assistant_id, member_id, latest_pay_time,
event_task_id, event_task_type),
) )
active_recall_tasks = cur.fetchall() inserted = cur.fetchone()
if inserted is None:
# 今天已记录过,跳过后续处理(避免重复生成回访)
conn.commit()
return {"completed": 0, "events": 0}
has_active_recall = len(active_recall_tasks) > 0 events = 1
# 将所有匹配的 active 召回任务标记为 completed # ── 2. 完成匹配的召回任务 ──
for task_id, task_type in active_recall_tasks: has_active_recall = len(recall_tasks) > 0
for task in recall_tasks:
cur.execute( cur.execute(
""" """
UPDATE biz.coach_tasks UPDATE biz.coach_tasks
SET status = 'completed', SET status = 'completed',
completed_at = %s, completed_at = %s,
completed_task_type = %s, completed_task_type = %s,
completion_type = 'auto',
updated_at = NOW() updated_at = NOW()
WHERE id = %s AND status = 'active' WHERE id = %s AND status = 'active'
""", """,
(service_time, task_type, task_id), (latest_pay_time, task["task_type"], task["id"]),
) )
_insert_history( _insert_history(
cur, cur,
task_id, task["id"],
action="completed", action="completed",
old_status="active", old_status="active",
new_status="completed", new_status="completed",
old_task_type=task_type, old_task_type=task["task_type"],
new_task_type=task_type, new_task_type=task["task_type"],
detail={ detail={
"service_time": str(service_time), "service_time": str(latest_pay_time),
"completed_task_type": task_type, "completed_task_type": task["task_type"],
}, },
) )
completed += 1 completed += 1
# ── 2. 生成回访任务CHANGE 2026-03-30 ── # ── 3. 关闭已有的 active 回访任务 ──
# 如果还有 active 召回任务(其他助教的),不生成回访
# 注意:上面已经把当前助教的召回任务标记为 completed 了
# 这里检查的是当前助教-客户对是否还有未完成的召回任务(不应该有了)
# 关闭已有的 active 回访任务
cur.execute( cur.execute(
""" """
SELECT id FROM biz.coach_tasks SELECT id FROM biz.coach_tasks
@@ -299,16 +320,19 @@ def _process_service_record(
action="superseded_by_new_visit", action="superseded_by_new_visit",
old_status="active", new_status="inactive", old_status="active", new_status="inactive",
old_task_type="follow_up_visit", new_task_type="follow_up_visit", old_task_type="follow_up_visit", new_task_type="follow_up_visit",
detail={"reason": "new_service_record", "service_time": str(service_time)}, detail={"reason": "new_service_record", "service_time": str(latest_pay_time)},
) )
# 创建新的回访任务48h 过期) # ── 4. 创建新的回访任务48h 过期) ──
from datetime import timedelta expires_at = (
expires_at = service_time + timedelta(hours=48) if hasattr(service_time, '__add__') else None latest_pay_time + timedelta(hours=48)
if hasattr(latest_pay_time, '__add__') else None
)
cur.execute( cur.execute(
""" """
INSERT INTO biz.coach_tasks INSERT INTO biz.coach_tasks
(site_id, assistant_id, member_id, task_type, status, expires_at, created_at, updated_at) (site_id, assistant_id, member_id, task_type, status,
expires_at, created_at, updated_at)
VALUES (%s, %s, %s, 'follow_up_visit', 'active', %s, NOW(), NOW()) VALUES (%s, %s, %s, 'follow_up_visit', 'active', %s, NOW(), NOW())
RETURNING id RETURNING id
""", """,
@@ -322,14 +346,14 @@ def _process_service_record(
new_task_type="follow_up_visit", new_task_type="follow_up_visit",
detail={ detail={
"reason": "service_record_detected", "reason": "service_record_detected",
"service_time": str(service_time), "service_time": str(latest_pay_time),
"had_recall": has_active_recall, "had_recall": has_active_recall,
}, },
) )
conn.commit() conn.commit()
# ── 3. 触发 recall_completed 事件(仅当有召回任务被完成时) ── # ── 5. 触发 recall_completed 事件(仅当有召回任务被完成时) ──
if has_active_recall: if has_active_recall:
try: try:
from app.services.trigger_scheduler import fire_event from app.services.trigger_scheduler import fire_event
@@ -339,7 +363,7 @@ def _process_service_record(
"site_id": site_id, "site_id": site_id,
"assistant_id": assistant_id, "assistant_id": assistant_id,
"member_id": member_id, "member_id": member_id,
"service_time": str(service_time), "service_time": str(latest_pay_time),
}, },
) )
except Exception: except Exception:
@@ -348,4 +372,4 @@ def _process_service_record(
site_id, assistant_id, member_id, site_id, assistant_id, member_id,
) )
return completed return {"completed": completed, "events": events}

View File

@@ -998,8 +998,8 @@ def _update_task_stats(conn, site_id: int) -> None:
# ── C: 历史总计(写入 ETL 关系指数表) ── # ── C: 历史总计(写入 ETL 关系指数表) ──
try: try:
from app.database import get_etl_readonly_connection from app.database import get_etl_write_connection
etl_conn = get_etl_readonly_connection(site_id) etl_conn = get_etl_write_connection()
try: try:
# 先从业务库聚合历史总计 # 先从业务库聚合历史总计
with conn.cursor() as cur: with conn.cursor() as cur:

View File

@@ -95,6 +95,7 @@ DWS_TASKS: list[TaskDefinition] = [
TaskDefinition("DWS_FINANCE_RECHARGE", "充值汇总", "汇总充值数据", "财务", "DWS"), TaskDefinition("DWS_FINANCE_RECHARGE", "充值汇总", "汇总充值数据", "财务", "DWS"),
TaskDefinition("DWS_FINANCE_INCOME_STRUCTURE", "收入结构", "分析收入结构", "财务", "DWS"), TaskDefinition("DWS_FINANCE_INCOME_STRUCTURE", "收入结构", "分析收入结构", "财务", "DWS"),
TaskDefinition("DWS_FINANCE_DISCOUNT_DETAIL", "折扣明细", "汇总折扣明细", "财务", "DWS"), TaskDefinition("DWS_FINANCE_DISCOUNT_DETAIL", "折扣明细", "汇总折扣明细", "财务", "DWS"),
TaskDefinition("DWS_FINANCE_AREA_DAILY", "区域财务日报", "按区域汇总每日财务数据", "财务", "DWS"),
# CHANGE [2026-02-19] intent: 同步 ETL 侧合并——原 DWS_RETENTION_CLEANUP / DWS_MV_REFRESH_* 已合并为 DWS_MAINTENANCE # CHANGE [2026-02-19] intent: 同步 ETL 侧合并——原 DWS_RETENTION_CLEANUP / DWS_MV_REFRESH_* 已合并为 DWS_MAINTENANCE
TaskDefinition("DWS_MAINTENANCE", "DWS 维护", "刷新物化视图 + 清理过期留存数据", "通用", "DWS", requires_window=False, is_common=False), TaskDefinition("DWS_MAINTENANCE", "DWS 维护", "刷新物化视图 + 清理过期留存数据", "通用", "DWS", requires_window=False, is_common=False),
# CHANGE [2026-03-27] intent: 注册 DWS 库存汇总任务(日/周/月)(需求 12.9 # CHANGE [2026-03-27] intent: 注册 DWS 库存汇总任务(日/周/月)(需求 12.9

View File

@@ -26,7 +26,7 @@ SCHEMA_ETL=meta
# API 配置(上游 SaaS API # API 配置(上游 SaaS API
# ------------------------------------------------------------------------------ # ------------------------------------------------------------------------------
API_BASE=https://pc.ficoo.vip/apiprod/admin/v1/ API_BASE=https://pc.ficoo.vip/apiprod/admin/v1/
API_TOKEN=eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJjbGllbnQtdHlwZSI6IjQiLCJ1c2VyLXR5cGUiOiIxIiwiaHR0cDovL3NjaGVtYXMubWljcm9zb2Z0LmNvbS93cy8yMDA4LzA2L2lkZW50aXR5L2NsYWltcy9yb2xlIjoiMTIiLCJyb2xlLWlkIjoiMTIiLCJ0ZW5hbnQtaWQiOiIyNzkwNjgzMTYwNzA5OTU3Iiwibmlja25hbWUiOiLnp5_miLfnrqHnkIblkZjvvJrmganmgakxIiwic2l0ZS1pZCI6IjAiLCJtb2JpbGUiOiIxMzgxMDUwMjMwNCIsInNpZCI6IjI5NTA0ODk2NTgzOTU4NDUiLCJzdGFmZi1pZCI6IjMwMDk5MTg2OTE1NTkwNDUiLCJvcmctaWQiOiIwIiwicm9sZS10eXBlIjoiMyIsInJlZnJlc2hUb2tlbiI6IkZJUUxIWWJLSFl5QktJQlVuLzZuQVdINitqOEpLbGNHN1NScDFFaFNuMEE9IiwicmVmcmVzaEV4cGlyeVRpbWUiOiIyMDI2LzQvNyDkuIrljYg0OjU2OjA4IiwibmVlZENoZWNrVG9rZW4iOiJmYWxzZSIsImV4cCI6MTc3NTUwODk2OCwiaXNzIjoidGVzdCIsImF1ZCI6IlVzZXIifQ.fc92FpZrLBkV9tD9sNPCvMvgNePh5Y7T6g5FLx8N16A API_TOKEN=eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJjbGllbnQtdHlwZSI6IjQiLCJ1c2VyLXR5cGUiOiIxIiwiaHR0cDovL3NjaGVtYXMubWljcm9zb2Z0LmNvbS93cy8yMDA4LzA2L2lkZW50aXR5L2NsYWltcy9yb2xlIjoiMTIiLCJyb2xlLWlkIjoiMTIiLCJ0ZW5hbnQtaWQiOiIyNzkwNjgzMTYwNzA5OTU3Iiwibmlja25hbWUiOiLnp5_miLfnrqHnkIblkZjvvJrmganmgakxIiwic2l0ZS1pZCI6IjAiLCJtb2JpbGUiOiIxMzgxMDUwMjMwNCIsInNpZCI6IjI5NTA0ODk2NTgzOTU4NDUiLCJzdGFmZi1pZCI6IjMwMDk5MTg2OTE1NTkwNDUiLCJvcmctaWQiOiIwIiwicm9sZS10eXBlIjoiMyIsInJlZnJlc2hUb2tlbiI6Ik9OUTkreFhSWjFPVFhzQWhieTJVa3RyVXR6UzdldVE5Q1VVQ3QzQ1ArMlE9IiwicmVmcmVzaEV4cGlyeVRpbWUiOiIyMDI2LzQvMTQg5LiL5Y2INTo0MToyNSIsIm5lZWRDaGVja1Rva2VuIjoiZmFsc2UiLCJleHAiOjE3NzYxNTk2ODUsImlzcyI6InRlc3QiLCJhdWQiOiJVc2VyIn0.KyULo2a6dirmiAkka5Ocu_ieoZY5VsVWVqMj5smwvmE
API_TIMEOUT=20 API_TIMEOUT=20
API_PAGE_SIZE=200 API_PAGE_SIZE=200
API_RETRY_MAX=3 API_RETRY_MAX=3

View File

@@ -1,9 +1,15 @@
# -*- coding: utf-8 -*- # -*- coding: utf-8 -*-
"""数据库连接管理器(限制最大连接超时时间)。""" """数据库连接管理器(限制最大连接超时时间)。"""
import time
import psycopg2 import psycopg2
import psycopg2.extras import psycopg2.extras
# 连接重试参数:应对 PostgreSQL 瞬时不可用
_CONNECT_MAX_RETRIES = 3
_CONNECT_RETRY_DELAY = 1.0
class DatabaseConnection: class DatabaseConnection:
"""封装 psycopg2 连接,支持会话参数和超时保护。""" """封装 psycopg2 连接,支持会话参数和超时保护。"""
@@ -24,11 +30,25 @@ class DatabaseConnection:
# assumptions: libpq 默认使用系统 locale 的 client_encodingWindows 中文系统为 GBK/CP936 # assumptions: libpq 默认使用系统 locale 的 client_encodingWindows 中文系统为 GBK/CP936
# 边界: 显式指定 client_encoding=utf8 确保连接层始终使用 UTF-8与数据库 server_encoding 一致 # 边界: 显式指定 client_encoding=utf8 确保连接层始终使用 UTF-8与数据库 server_encoding 一致
# 验证: web-admin 手动触发 ETL 全量 flow不再出现 0xd6 解码错误 # 验证: web-admin 手动触发 ETL 全量 flow不再出现 0xd6 解码错误
last_exc = None
for attempt in range(_CONNECT_MAX_RETRIES):
try:
conn = psycopg2.connect( conn = psycopg2.connect(
self._dsn, self._dsn,
connect_timeout=timeout_val, connect_timeout=timeout_val,
options="-c client_encoding=utf8", options="-c client_encoding=utf8",
keepalives=1,
keepalives_idle=60,
keepalives_interval=10,
keepalives_count=3,
) )
break
except psycopg2.OperationalError as e:
last_exc = e
if attempt < _CONNECT_MAX_RETRIES - 1:
time.sleep(_CONNECT_RETRY_DELAY * (attempt + 1))
else:
raise last_exc
conn.autocommit = False conn.autocommit = False
# 会话参数(时区、语句超时等) # 会话参数(时区、语句超时等)

View File

@@ -31,6 +31,16 @@ class DatabaseOperations:
def _connect_timeout(self): def _connect_timeout(self):
return self._connection._connect_timeout return self._connection._connect_timeout
def ensure_open(self) -> bool:
"""透传 DatabaseConnection.ensure_open(),重连后同步 self.conn 引用。"""
result = self._connection.ensure_open()
self.conn = self._connection.conn
return result
def rollback(self):
"""透传 DatabaseConnection.rollback()。"""
self._connection.rollback()
def batch_execute(self, sql: str, rows: list, page_size: int = 1000): def batch_execute(self, sql: str, rows: list, page_size: int = 1000):
"""批量执行SQL""" """批量执行SQL"""
if not rows: if not rows:

View File

@@ -224,6 +224,9 @@ class FlowRunner:
summary_text = flow_logger.end(status="成功") summary_text = flow_logger.end(status="成功")
self.logger.info("\n%s", summary_text) self.logger.info("\n%s", summary_text)
# CHANGE 2026-04-07 | Fix-12ETL 完成后通知后端触发任务编排
self._notify_backend_etl_completed(run_label)
return { return {
"status": "SUCCESS", "status": "SUCCESS",
"flow": run_label, "flow": run_label,
@@ -243,6 +246,33 @@ class FlowRunner:
self.logger.error("\n%s", summary_text) self.logger.error("\n%s", summary_text)
raise raise
def _notify_backend_etl_completed(self, pipeline: str) -> None:
"""ETL 完成后通知后端触发任务编排recall_detector → task_generator
CHANGE 2026-04-07 | Fix-12失败不阻断主流程仅记录警告。
"""
import os
backend_url = os.getenv("BACKEND_API_URL", "http://127.0.0.1:8000")
internal_token = os.getenv("INTERNAL_API_TOKEN", "")
if not internal_token:
self.logger.warning("ETL 完成回调跳过INTERNAL_API_TOKEN 未配置")
return
try:
import httpx
resp = httpx.post(
f"{backend_url}/api/internal/etl-completed",
json={"pipeline": pipeline},
headers={"Authorization": f"Internal-Token {internal_token}"},
timeout=300,
)
if resp.status_code == 200:
self.logger.info("ETL 完成回调成功: %s", resp.json())
else:
self.logger.warning("ETL 完成回调失败: status=%d, body=%s", resp.status_code, resp.text)
except Exception:
self.logger.warning("ETL 完成回调异常(不阻断主流程)", exc_info=True)
def _run_post_consistency_check(self, timer: EtlTimer) -> str | None: def _run_post_consistency_check(self, timer: EtlTimer) -> str | None:
"""ETL 完成后运行数据一致性检查,输出黑盒测试报告。 """ETL 完成后运行数据一致性检查,输出黑盒测试报告。

View File

@@ -21,6 +21,8 @@ from pathlib import Path
from typing import Any, Dict, List from typing import Any, Dict, List
from zoneinfo import ZoneInfo from zoneinfo import ZoneInfo
import psycopg2
from api.recording_client import RecordingAPIClient from api.recording_client import RecordingAPIClient
from api.local_json_client import LocalJsonClient from api.local_json_client import LocalJsonClient
from orchestration.cursor_manager import CursorManager from orchestration.cursor_manager import CursorManager
@@ -91,6 +93,17 @@ class TaskExecutor:
self.logger.info("开始运行任务: %s, run_uuid=%s", task_codes, run_uuid) self.logger.info("开始运行任务: %s, run_uuid=%s", task_codes, run_uuid)
for task_code in task_codes: for task_code in task_codes:
# 每个任务前检测连接状态,断线自动重连
if not self.db_ops.ensure_open():
self.logger.error("数据库连接不可用且重连失败,跳过任务 %s", task_code)
results.append({
"task_code": task_code,
"status": "失败",
"error": "数据库连接不可用",
"counts": {},
})
continue
# 为每个任务创建独立的日志缓冲区,避免多任务日志交叉 # 为每个任务创建独立的日志缓冲区,避免多任务日志交叉
task_log_buf = TaskLogBuffer(task_code, self.logger) task_log_buf = TaskLogBuffer(task_code, self.logger)
try: try:
@@ -113,9 +126,13 @@ class TaskExecutor:
task_log_buf.error("任务失败: %s", exc) task_log_buf.error("任务失败: %s", exc)
# CHANGE 2026-02-24 | 任务失败后 rollback防止 InFailedSqlTransaction 级联 # CHANGE 2026-02-24 | 任务失败后 rollback防止 InFailedSqlTransaction 级联
try: try:
self.db.rollback() self.db_ops.rollback()
except Exception: except Exception:
pass pass
# 连接级异常:尝试重连,避免后续任务全部级联失败
if isinstance(exc, (psycopg2.OperationalError, psycopg2.InterfaceError)):
self.logger.warning("检测到连接异常,尝试重连...")
self.db_ops.ensure_open()
results.append({ results.append({
"task_code": task_code, "task_code": task_code,
"status": "失败", "status": "失败",

View File

@@ -43,6 +43,7 @@ from tasks.dws import (
FinanceRechargeTask, FinanceRechargeTask,
FinanceIncomeStructureTask, FinanceIncomeStructureTask,
FinanceDiscountDetailTask, FinanceDiscountDetailTask,
FinanceAreaDailyTask,
# 库存汇总任务 # 库存汇总任务
GoodsStockDailyTask, GoodsStockDailyTask,
GoodsStockWeeklyTask, GoodsStockWeeklyTask,
@@ -167,6 +168,7 @@ default_registry.register("DWS_FINANCE_DAILY", FinanceDailyTask, layer="DWS")
default_registry.register("DWS_FINANCE_RECHARGE", FinanceRechargeTask, layer="DWS") default_registry.register("DWS_FINANCE_RECHARGE", FinanceRechargeTask, layer="DWS")
default_registry.register("DWS_FINANCE_INCOME_STRUCTURE", FinanceIncomeStructureTask, layer="DWS") default_registry.register("DWS_FINANCE_INCOME_STRUCTURE", FinanceIncomeStructureTask, layer="DWS")
default_registry.register("DWS_FINANCE_DISCOUNT_DETAIL", FinanceDiscountDetailTask, layer="DWS") default_registry.register("DWS_FINANCE_DISCOUNT_DETAIL", FinanceDiscountDetailTask, layer="DWS")
default_registry.register("DWS_FINANCE_AREA_DAILY", FinanceAreaDailyTask, layer="DWS", depends_on=["DWS_FINANCE_DAILY"])
# CHANGE [2026-03-27] intent: 移除对 DWD_LOAD_FROM_ODS 的显式依赖dwd_dws Flow 下该依赖不在批次内只产生无意义 warning # CHANGE [2026-03-27] intent: 移除对 DWD_LOAD_FROM_ODS 的显式依赖dwd_dws Flow 下该依赖不在批次内只产生无意义 warning
default_registry.register("DWS_GOODS_STOCK_DAILY", GoodsStockDailyTask, layer="DWS") default_registry.register("DWS_GOODS_STOCK_DAILY", GoodsStockDailyTask, layer="DWS")
default_registry.register("DWS_GOODS_STOCK_WEEKLY", GoodsStockWeeklyTask, layer="DWS") default_registry.register("DWS_GOODS_STOCK_WEEKLY", GoodsStockWeeklyTask, layer="DWS")
@@ -182,6 +184,7 @@ default_registry.register("DWS_MAINTENANCE", DwsMaintenanceTask, layer="DWS", de
"DWS_ASSISTANT_PROJECT_TAG", "DWS_MEMBER_PROJECT_TAG", "DWS_ASSISTANT_PROJECT_TAG", "DWS_MEMBER_PROJECT_TAG",
"DWS_FINANCE_DAILY", "DWS_FINANCE_RECHARGE", "DWS_FINANCE_DAILY", "DWS_FINANCE_RECHARGE",
"DWS_FINANCE_INCOME_STRUCTURE", "DWS_FINANCE_DISCOUNT_DETAIL", "DWS_FINANCE_INCOME_STRUCTURE", "DWS_FINANCE_DISCOUNT_DETAIL",
"DWS_FINANCE_AREA_DAILY",
"DWS_BUILD_ORDER_SUMMARY", "DWS_BUILD_ORDER_SUMMARY",
"DWS_GOODS_STOCK_DAILY", "DWS_GOODS_STOCK_WEEKLY", "DWS_GOODS_STOCK_MONTHLY", "DWS_GOODS_STOCK_DAILY", "DWS_GOODS_STOCK_WEEKLY", "DWS_GOODS_STOCK_MONTHLY",
]) ])

View File

@@ -9,6 +9,7 @@ dependencies = [
"tzdata>=2023.0", "tzdata>=2023.0",
"python-dotenv", "python-dotenv",
"openpyxl>=3.1.0", "openpyxl>=3.1.0",
"httpx>=0.24.0",
"neozqyy-shared", "neozqyy-shared",
] ]

View File

@@ -373,6 +373,8 @@ def transform_area_daily(
) )
# 收集所有涉及的日期 # 收集所有涉及的日期
all_dates: set[date] = set() all_dates: set[date] = set()
# 未知区域名称计数(汇总后一次性输出,避免逐行 warning 产生大量日志噪音)
_unknown_area_counts: Dict[str, int] = defaultdict(int)
for row in settlement_rows: for row in settlement_rows:
sd = row.get("stat_date") sd = row.get("stat_date")
@@ -389,11 +391,7 @@ def transform_area_daily(
area_code = resolve_area_code(area_name) area_code = resolve_area_code(area_name)
if area_code is None: if area_code is None:
# 未知区域:记录警告,不计入具体区域行,但仍计入 all 行 _unknown_area_counts[str(area_name)] += 1
logger.warning(
"DWS_FINANCE_AREA_DAILY: 未知区域名称 '%s',不计入具体区域",
area_name,
)
# 提取金额 # 提取金额
table_fee = safe_decimal_fn(row.get("table_fee_amount", 0)) table_fee = safe_decimal_fn(row.get("table_fee_amount", 0))
@@ -464,6 +462,15 @@ def transform_area_daily(
for k, v in fields.items(): for k, v in fields.items():
bucket[k] = bucket[k] + v bucket[k] = bucket[k] + v
# 汇总输出未知区域名称(避免逐行 warning 刷屏)
if _unknown_area_counts:
summary = ", ".join(f"'{k}': {v}" for k, v in _unknown_area_counts.items())
logger.warning(
"DWS_FINANCE_AREA_DAILY: 共 %d 条结算单区域未匹配(不计入具体区域,仅计入 all: %s",
sum(_unknown_area_counts.values()),
summary,
)
# 也收集 global_summary 中的日期 # 也收集 global_summary 中的日期
for sd in global_index: for sd in global_index:
all_dates.add(sd) all_dates.add(sd)

View File

@@ -104,10 +104,10 @@ class FinanceBaseTask(BaseDwsTask):
SUM(CASE WHEN is_first = 1 THEN pay_amount + point_amount ELSE 0 END) AS first_recharge_total, SUM(CASE WHEN is_first = 1 THEN pay_amount + point_amount ELSE 0 END) AS first_recharge_total,
SUM(CASE WHEN is_first = 1 THEN pay_amount ELSE 0 END) AS first_recharge_cash, SUM(CASE WHEN is_first = 1 THEN pay_amount ELSE 0 END) AS first_recharge_cash,
SUM(CASE WHEN is_first = 1 THEN point_amount ELSE 0 END) AS first_recharge_gift, SUM(CASE WHEN is_first = 1 THEN point_amount ELSE 0 END) AS first_recharge_gift,
COUNT(CASE WHEN is_first = 0 OR is_first IS NULL THEN 1 END) AS renewal_count, COUNT(CASE WHEN is_first = 2 THEN 1 END) AS renewal_count,
SUM(CASE WHEN is_first = 0 OR is_first IS NULL THEN pay_amount + point_amount ELSE 0 END) AS renewal_total, SUM(CASE WHEN is_first = 2 THEN pay_amount + point_amount ELSE 0 END) AS renewal_total,
SUM(CASE WHEN is_first = 0 OR is_first IS NULL THEN pay_amount ELSE 0 END) AS renewal_cash, SUM(CASE WHEN is_first = 2 THEN pay_amount ELSE 0 END) AS renewal_cash,
SUM(CASE WHEN is_first = 0 OR is_first IS NULL THEN point_amount ELSE 0 END) AS renewal_gift, SUM(CASE WHEN is_first = 2 THEN point_amount ELSE 0 END) AS renewal_gift,
COUNT(DISTINCT member_id) AS recharge_member_count COUNT(DISTINCT member_id) AS recharge_member_count
FROM dwd.dwd_recharge_order FROM dwd.dwd_recharge_order
WHERE site_id = %s WHERE site_id = %s

View File

@@ -139,10 +139,10 @@ class FinanceRechargeTask(FinanceBaseTask):
SUM(CASE WHEN is_first = 1 THEN pay_amount ELSE 0 END) AS first_recharge_cash, SUM(CASE WHEN is_first = 1 THEN pay_amount ELSE 0 END) AS first_recharge_cash,
SUM(CASE WHEN is_first = 1 THEN point_amount ELSE 0 END) AS first_recharge_gift, SUM(CASE WHEN is_first = 1 THEN point_amount ELSE 0 END) AS first_recharge_gift,
SUM(CASE WHEN is_first = 1 THEN pay_amount + point_amount ELSE 0 END) AS first_recharge_total, SUM(CASE WHEN is_first = 1 THEN pay_amount + point_amount ELSE 0 END) AS first_recharge_total,
COUNT(CASE WHEN is_first != 1 OR is_first IS NULL THEN 1 END) AS renewal_count, COUNT(CASE WHEN is_first = 2 THEN 1 END) AS renewal_count,
SUM(CASE WHEN is_first != 1 OR is_first IS NULL THEN pay_amount ELSE 0 END) AS renewal_cash, SUM(CASE WHEN is_first = 2 THEN pay_amount ELSE 0 END) AS renewal_cash,
SUM(CASE WHEN is_first != 1 OR is_first IS NULL THEN point_amount ELSE 0 END) AS renewal_gift, SUM(CASE WHEN is_first = 2 THEN point_amount ELSE 0 END) AS renewal_gift,
SUM(CASE WHEN is_first != 1 OR is_first IS NULL THEN pay_amount + point_amount ELSE 0 END) AS renewal_total, SUM(CASE WHEN is_first = 2 THEN pay_amount + point_amount ELSE 0 END) AS renewal_total,
COUNT(DISTINCT member_id) AS recharge_member_count, COUNT(DISTINCT member_id) AS recharge_member_count,
COUNT(DISTINCT CASE WHEN is_first = 1 THEN member_id END) AS new_member_count COUNT(DISTINCT CASE WHEN is_first = 1 THEN member_id END) AS new_member_count
FROM dwd.dwd_recharge_order FROM dwd.dwd_recharge_order
@@ -198,20 +198,19 @@ class FinanceRechargeTask(FinanceBaseTask):
CASH_CARD_TYPE_ID = 2793249295533893 CASH_CARD_TYPE_ID = 2793249295533893
GIFT_CARD_TYPE_IDS = [2791990152417157, 2793266846533445, 2794699703437125] GIFT_CARD_TYPE_IDS = [2791990152417157, 2793266846533445, 2794699703437125]
# CHANGE 2026-02-21 | dim_member_card_account 无 site_id 字段,改用 register_site_id # CHANGE 2026-04-08 | 修复卡余额快照失真:改为门店全量会员
# CHANGE 2026-02-22 | 需求 B通过事实表反查支持跨店消费会员 # 原逻辑通过 dwd_recharge_order 反查,遗漏"有卡无充值"会员(赠卡、活动等)
# 余额代表门店预收负债,应覆盖所有持卡会员
sql = """ sql = """
SELECT card_type_id, SUM(balance) AS total_balance SELECT dca.card_type_id, SUM(dca.balance) AS total_balance
FROM dwd.dim_member_card_account FROM dwd.dim_member_card_account dca
WHERE tenant_member_id IN ( JOIN dwd.dim_member m
SELECT DISTINCT member_id ON dca.tenant_member_id = m.member_id
FROM dwd.dwd_recharge_order AND m.scd2_is_current = 1
WHERE site_id = %s WHERE m.register_site_id = %s
AND member_id IS NOT NULL AND dca.scd2_is_current = 1
AND member_id != 0 AND COALESCE(dca.is_delete, 0) = 0
) AND scd2_is_current = 1 GROUP BY dca.card_type_id
AND COALESCE(is_delete, 0) = 0
GROUP BY card_type_id
""" """
rows = self.db.query(sql, (site_id,)) rows = self.db.query(sql, (site_id,))

View File

@@ -36,6 +36,7 @@ class MemberActivityData:
days_since_last_recharge: Optional[int] = None days_since_last_recharge: Optional[int] = None
visits_14d: int = 0 visits_14d: int = 0
visits_30d: int = 0
visits_60d: int = 0 visits_60d: int = 0
visits_total: int = 0 visits_total: int = 0
@@ -451,8 +452,9 @@ class MemberIndexBaseTask(BaseIndexTask):
last_visit_time = max(r.get('last_visit_time') for r in day_rows_sorted) last_visit_time = max(r.get('last_visit_time') for r in day_rows_sorted)
data.last_visit_time = last_visit_time data.last_visit_time = last_visit_time
# 近14/60天到店次数 # 近14/30/60天到店次数
days_14_ago = base_date - timedelta(days=14) days_14_ago = base_date - timedelta(days=14)
days_30_ago_visit = base_date - timedelta(days=30)
days_60_ago = base_date - timedelta(days=60) days_60_ago = base_date - timedelta(days=60)
for r in day_rows_sorted: for r in day_rows_sorted:
visit_date = r.get('visit_date') visit_date = r.get('visit_date')
@@ -460,6 +462,8 @@ class MemberIndexBaseTask(BaseIndexTask):
continue continue
if visit_date >= days_14_ago: if visit_date >= days_14_ago:
data.visits_14d += 1 data.visits_14d += 1
if visit_date >= days_30_ago_visit:
data.visits_30d += 1
if visit_date >= days_60_ago: if visit_date >= days_60_ago:
data.visits_60d += 1 data.visits_60d += 1

View File

@@ -317,7 +317,7 @@ class NewconvIndexTask(MemberIndexBaseTask):
status, segment, status, segment,
member_create_time, first_visit_time, last_visit_time, last_recharge_time, member_create_time, first_visit_time, last_visit_time, last_recharge_time,
t_v, t_r, t_a, t_v, t_r, t_a,
visits_14d, visits_60d, visits_total, visits_14d, visits_30d, visits_60d, visits_total,
spend_30d, spend_180d, sv_balance, recharge_60d_amt, spend_30d, spend_180d, sv_balance, recharge_60d_amt,
interval_count, interval_count,
need_new, salvage_new, recharge_new, value_new, need_new, salvage_new, recharge_new, value_new,
@@ -331,7 +331,7 @@ class NewconvIndexTask(MemberIndexBaseTask):
%s, %s, %s, %s,
%s, %s, %s, %s, %s, %s, %s, %s,
%s, %s, %s, %s, %s, %s,
%s, %s, %s, %s, %s, %s, %s,
%s, %s, %s, %s, %s, %s, %s, %s,
%s, %s,
%s, %s, %s, %s, %s, %s, %s, %s,
@@ -352,7 +352,7 @@ class NewconvIndexTask(MemberIndexBaseTask):
data.status, data.segment, data.status, data.segment,
activity.member_create_time, activity.first_visit_time, activity.last_visit_time, activity.last_recharge_time, activity.member_create_time, activity.first_visit_time, activity.last_visit_time, activity.last_recharge_time,
activity.t_v, activity.t_r, activity.t_a, activity.t_v, activity.t_r, activity.t_a,
activity.visits_14d, activity.visits_60d, activity.visits_total, activity.visits_14d, activity.visits_30d, activity.visits_60d, activity.visits_total,
activity.spend_30d, activity.spend_180d, activity.sv_balance, activity.recharge_60d_amt, activity.spend_30d, activity.spend_180d, activity.sv_balance, activity.recharge_60d_amt,
activity.interval_count, activity.interval_count,
data.need_new, data.salvage_new, data.recharge_new, data.value_new, data.need_new, data.salvage_new, data.recharge_new, data.value_new,

View File

@@ -353,7 +353,12 @@ class SpendingPowerIndexTask(BaseIndexTask):
COALESCE(NULLIF(s.member_id, 0), mca.tenant_member_id) COALESCE(NULLIF(s.member_id, 0), mca.tenant_member_id)
AS canonical_member_id, AS canonical_member_id,
s.pay_time, s.pay_time,
COALESCE(s.pay_amount, 0) AS pay_amount -- DWD 规则 #1禁止 consume_money/pay_amount使用 items_sum
(COALESCE(s.table_charge_money, 0)
+ COALESCE(s.goods_money, 0)
+ COALESCE(s.assistant_pd_money, 0)
+ COALESCE(s.assistant_cx_money, 0)
+ COALESCE(s.electricity_money, 0)) AS items_sum
FROM dwd.dwd_settlement_head s FROM dwd.dwd_settlement_head s
LEFT JOIN dwd.dim_member_card_account mca LEFT JOIN dwd.dim_member_card_account mca
ON s.member_card_account_id = mca.member_card_id ON s.member_card_account_id = mca.member_card_id
@@ -368,14 +373,14 @@ class SpendingPowerIndexTask(BaseIndexTask):
SELECT SELECT
canonical_member_id AS member_id, canonical_member_id AS member_id,
-- 90 天窗口 -- 90 天窗口
SUM(pay_amount) AS spend_90, SUM(items_sum) AS spend_90,
COUNT(*) AS orders_90, COUNT(*) AS orders_90,
COUNT(DISTINCT {biz_expr}) AS visit_days_90, COUNT(DISTINCT {biz_expr}) AS visit_days_90,
COUNT(DISTINCT EXTRACT(ISOYEAR FROM pay_time)::int * 100 COUNT(DISTINCT EXTRACT(ISOYEAR FROM pay_time)::int * 100
+ EXTRACT(WEEK FROM pay_time)::int) AS active_weeks_90, + EXTRACT(WEEK FROM pay_time)::int) AS active_weeks_90,
-- 30 天窗口(子集过滤) -- 30 天窗口(子集过滤)
SUM(CASE WHEN pay_time >= %s - INTERVAL '{short_days} days' SUM(CASE WHEN pay_time >= %s - INTERVAL '{short_days} days'
THEN pay_amount ELSE 0 END) AS spend_30, THEN items_sum ELSE 0 END) AS spend_30,
SUM(CASE WHEN pay_time >= %s - INTERVAL '{short_days} days' SUM(CASE WHEN pay_time >= %s - INTERVAL '{short_days} days'
THEN 1 ELSE 0 END) AS orders_30, THEN 1 ELSE 0 END) AS orders_30,
COUNT(DISTINCT CASE WHEN pay_time >= %s - INTERVAL '{short_days} days' COUNT(DISTINCT CASE WHEN pay_time >= %s - INTERVAL '{short_days} days'
@@ -491,7 +496,12 @@ class SpendingPowerIndexTask(BaseIndexTask):
COALESCE(NULLIF(s.member_id, 0), mca.tenant_member_id) COALESCE(NULLIF(s.member_id, 0), mca.tenant_member_id)
AS canonical_member_id, AS canonical_member_id,
{biz_expr_s} AS pay_date, {biz_expr_s} AS pay_date,
COALESCE(s.pay_amount, 0) AS pay_amount -- DWD 规则 #1使用 items_sum
(COALESCE(s.table_charge_money, 0)
+ COALESCE(s.goods_money, 0)
+ COALESCE(s.assistant_pd_money, 0)
+ COALESCE(s.assistant_cx_money, 0)
+ COALESCE(s.electricity_money, 0)) AS items_sum
FROM dwd.dwd_settlement_head s FROM dwd.dwd_settlement_head s
LEFT JOIN dwd.dim_member_card_account mca LEFT JOIN dwd.dim_member_card_account mca
ON s.member_card_account_id = mca.member_card_id ON s.member_card_account_id = mca.member_card_id
@@ -502,7 +512,7 @@ class SpendingPowerIndexTask(BaseIndexTask):
AND s.settle_type IN (1, 3) AND s.settle_type IN (1, 3)
AND s.pay_time >= NOW() - INTERVAL '{long_days} days' AND s.pay_time >= NOW() - INTERVAL '{long_days} days'
) )
SELECT pay_date, SUM(pay_amount) AS daily_spend SELECT pay_date, SUM(items_sum) AS daily_spend
FROM consume_source FROM consume_source
WHERE canonical_member_id = %s WHERE canonical_member_id = %s
GROUP BY pay_date GROUP BY pay_date
@@ -544,7 +554,12 @@ class SpendingPowerIndexTask(BaseIndexTask):
COALESCE(NULLIF(s.member_id, 0), mca.tenant_member_id) COALESCE(NULLIF(s.member_id, 0), mca.tenant_member_id)
AS canonical_member_id, AS canonical_member_id,
{biz_expr_s} AS pay_date, {biz_expr_s} AS pay_date,
COALESCE(s.pay_amount, 0) AS pay_amount -- DWD 规则 #1使用 items_sum
(COALESCE(s.table_charge_money, 0)
+ COALESCE(s.goods_money, 0)
+ COALESCE(s.assistant_pd_money, 0)
+ COALESCE(s.assistant_cx_money, 0)
+ COALESCE(s.electricity_money, 0)) AS items_sum
FROM dwd.dwd_settlement_head s FROM dwd.dwd_settlement_head s
LEFT JOIN dwd.dim_member_card_account mca LEFT JOIN dwd.dim_member_card_account mca
ON s.member_card_account_id = mca.member_card_id ON s.member_card_account_id = mca.member_card_id
@@ -558,7 +573,7 @@ class SpendingPowerIndexTask(BaseIndexTask):
) )
SELECT canonical_member_id AS member_id, SELECT canonical_member_id AS member_id,
pay_date, pay_date,
SUM(pay_amount) AS daily_spend SUM(items_sum) AS daily_spend
FROM consume_source FROM consume_source
WHERE canonical_member_id > 0 WHERE canonical_member_id > 0
GROUP BY canonical_member_id, pay_date GROUP BY canonical_member_id, pay_date

View File

@@ -369,7 +369,7 @@ class WinbackIndexTask(MemberIndexBaseTask):
status, segment, status, segment,
member_create_time, first_visit_time, last_visit_time, last_recharge_time, member_create_time, first_visit_time, last_visit_time, last_recharge_time,
t_v, t_r, t_a, t_v, t_r, t_a,
visits_14d, visits_60d, visits_total, visits_14d, visits_30d, visits_60d, visits_total,
spend_30d, spend_180d, sv_balance, recharge_60d_amt, spend_30d, spend_180d, sv_balance, recharge_60d_amt,
interval_count, interval_count,
overdue_old, overdue_cdf_p, drop_old, recharge_old, value_old, overdue_old, overdue_cdf_p, drop_old, recharge_old, value_old,
@@ -382,7 +382,7 @@ class WinbackIndexTask(MemberIndexBaseTask):
%s, %s, %s, %s,
%s, %s, %s, %s, %s, %s, %s, %s,
%s, %s, %s, %s, %s, %s,
%s, %s, %s, %s, %s, %s, %s,
%s, %s, %s, %s, %s, %s, %s, %s,
%s, %s,
%s, %s, %s, %s, %s, %s, %s, %s, %s, %s,
@@ -402,7 +402,7 @@ class WinbackIndexTask(MemberIndexBaseTask):
data.status, data.segment, data.status, data.segment,
activity.member_create_time, activity.first_visit_time, activity.last_visit_time, activity.last_recharge_time, activity.member_create_time, activity.first_visit_time, activity.last_visit_time, activity.last_recharge_time,
activity.t_v, activity.t_r, activity.t_a, activity.t_v, activity.t_r, activity.t_a,
activity.visits_14d, activity.visits_60d, activity.visits_total, activity.visits_14d, activity.visits_30d, activity.visits_60d, activity.visits_total,
activity.spend_30d, activity.spend_180d, activity.sv_balance, activity.recharge_60d_amt, activity.spend_30d, activity.spend_180d, activity.sv_balance, activity.recharge_60d_amt,
activity.interval_count, activity.interval_count,
data.overdue_old, data.overdue_cdf_p, data.drop_old, data.recharge_old, data.value_old, data.overdue_old, data.overdue_cdf_p, data.drop_old, data.recharge_old, data.value_old,

View File

@@ -162,6 +162,8 @@ Page({
_lastScrollTop: 0, _lastScrollTop: 0,
_scrollAcc: 0, _scrollAcc: 0,
_scrollDir: null as 'up' | 'down' | null, _scrollDir: null as 'up' | 'down' | null,
/** 请求代数计数器:切换筛选时递增,旧请求的响应到达后对比丢弃 */
_loadGeneration: 0,
onLoad() { onLoad() {
this.loadData() this.loadData()
@@ -181,6 +183,7 @@ Page({
}, },
onPullDownRefresh() { onPullDownRefresh() {
this._loadGeneration++
this.setData({ currentPage: 1, hasMore: true, coaches: [], allCoaches: [] }) this.setData({ currentPage: 1, hasMore: true, coaches: [], allCoaches: [] })
this.loadData() this.loadData()
setTimeout(() => wx.stopPullDownRefresh(), 500) setTimeout(() => wx.stopPullDownRefresh(), 500)
@@ -219,7 +222,9 @@ Page({
// CHANGE 2026-03-28 | P4 联调:替换 Mock 为真实 API 调用 // CHANGE 2026-03-28 | P4 联调:替换 Mock 为真实 API 调用
// CHANGE 2026-03-29 | 懒加载:支持分页追加 // CHANGE 2026-03-29 | 懒加载:支持分页追加
// CHANGE 2026-04-08 | 竞态修复:用 _loadGeneration 丢弃过时响应
async loadData() { async loadData() {
const gen = this._loadGeneration
const page = this.data.currentPage const page = this.data.currentPage
const isFirstPage = page === 1 const isFirstPage = page === 1
@@ -239,6 +244,8 @@ Page({
page, page,
pageSize: this.data.pageSize, pageSize: this.data.pageSize,
}) })
// 竞态保护:响应到达时代数已变(用户切了筛选),丢弃本次结果
if (gen !== this._loadGeneration) return
const items = res.items const items = res.items
if (isFirstPage && (!items || items.length === 0)) { if (isFirstPage && (!items || items.length === 0)) {
this.setData({ pageState: 'empty', totalCount: 0, hasMore: false }) this.setData({ pageState: 'empty', totalCount: 0, hasMore: false })
@@ -318,6 +325,7 @@ Page({
onSortChange(e: WechatMiniprogram.CustomEvent<{ value: string }>) { onSortChange(e: WechatMiniprogram.CustomEvent<{ value: string }>) {
const val = e.detail.value const val = e.detail.value
this._loadGeneration++
this.setData({ this.setData({
selectedSort: val, selectedSort: val,
dimType: SORT_TO_DIM[val] || 'perf', dimType: SORT_TO_DIM[val] || 'perf',
@@ -327,11 +335,13 @@ Page({
}, },
onSkillChange(e: WechatMiniprogram.CustomEvent<{ value: string }>) { onSkillChange(e: WechatMiniprogram.CustomEvent<{ value: string }>) {
this._loadGeneration++
this.setData({ selectedSkill: e.detail.value, currentPage: 1, hasMore: true, coaches: [], allCoaches: [] }) this.setData({ selectedSkill: e.detail.value, currentPage: 1, hasMore: true, coaches: [], allCoaches: [] })
this.loadData() this.loadData()
}, },
onTimeChange(e: WechatMiniprogram.CustomEvent<{ value: string }>) { onTimeChange(e: WechatMiniprogram.CustomEvent<{ value: string }>) {
this._loadGeneration++
this.setData({ selectedTime: e.detail.value, currentPage: 1, hasMore: true, coaches: [], allCoaches: [] }) this.setData({ selectedTime: e.detail.value, currentPage: 1, hasMore: true, coaches: [], allCoaches: [] })
this.loadData() this.loadData()
}, },

View File

@@ -33,7 +33,7 @@ const DIMENSION_OPTIONS = [
{ value: 'recent', text: '最近到店' }, { value: 'recent', text: '最近到店' },
{ value: 'spend60', text: '最高消费 近60天' }, { value: 'spend60', text: '最高消费 近60天' },
{ value: 'freq60', text: '最频繁 近60天' }, { value: 'freq60', text: '最频繁 近60天' },
{ value: 'loyal', text: '最专一 近60天' }, { value: 'loyal', text: '最专一 近90天' },
] ]
// CHANGE 2026-03-28 | P9 修复value 改为数据库 category_code与后端枚举一致。 // CHANGE 2026-03-28 | P9 修复value 改为数据库 category_code与后端枚举一致。
@@ -200,6 +200,8 @@ Page({
_lastScrollTop: 0, _lastScrollTop: 0,
_scrollAcc: 0, _scrollAcc: 0,
_scrollDir: null as 'up' | 'down' | null, _scrollDir: null as 'up' | 'down' | null,
/** 请求代数计数器:切换维度/项目时递增,旧请求的响应到达后对比丢弃 */
_loadGeneration: 0,
onLoad() { onLoad() {
this.loadData() this.loadData()
@@ -219,6 +221,7 @@ Page({
}, },
onPullDownRefresh() { onPullDownRefresh() {
this._loadGeneration++
this.setData({ currentPage: 1, hasMore: true, customers: [], allCustomers: [] }) this.setData({ currentPage: 1, hasMore: true, customers: [], allCustomers: [] })
this.loadData() this.loadData()
setTimeout(() => wx.stopPullDownRefresh(), 500) setTimeout(() => wx.stopPullDownRefresh(), 500)
@@ -257,7 +260,9 @@ Page({
// CHANGE 2026-03-28 | P5 联调:替换 Mock 为真实 API 调用 // CHANGE 2026-03-28 | P5 联调:替换 Mock 为真实 API 调用
// CHANGE 2026-03-29 | 懒加载:支持分页追加 // CHANGE 2026-03-29 | 懒加载:支持分页追加
// CHANGE 2026-04-08 | 竞态修复:用 _loadGeneration 丢弃过时响应
async loadData() { async loadData() {
const gen = this._loadGeneration
const page = this.data.currentPage const page = this.data.currentPage
const isFirstPage = page === 1 const isFirstPage = page === 1
@@ -276,6 +281,8 @@ Page({
page, page,
pageSize: this.data.pageSize, pageSize: this.data.pageSize,
}) })
// 竞态保护:响应到达时代数已变(用户切了维度/项目),丢弃本次结果
if (gen !== this._loadGeneration) return
const items = res.items const items = res.items
if (isFirstPage && (!items || items.length === 0)) { if (isFirstPage && (!items || items.length === 0)) {
this.setData({ pageState: 'empty', totalCount: 0, hasMore: false }) this.setData({ pageState: 'empty', totalCount: 0, hasMore: false })
@@ -348,6 +355,7 @@ Page({
onDimensionChange(e: WechatMiniprogram.CustomEvent<{ value: string }>) { onDimensionChange(e: WechatMiniprogram.CustomEvent<{ value: string }>) {
const val = e.detail.value const val = e.detail.value
this._loadGeneration++
this.setData({ this.setData({
selectedDimension: val, selectedDimension: val,
dimType: DIMENSION_TO_DIM[val] || 'recall', dimType: DIMENSION_TO_DIM[val] || 'recall',
@@ -357,6 +365,7 @@ Page({
}, },
onProjectChange(e: WechatMiniprogram.CustomEvent<{ value: string }>) { onProjectChange(e: WechatMiniprogram.CustomEvent<{ value: string }>) {
this._loadGeneration++
this.setData({ selectedProject: e.detail.value, currentPage: 1, hasMore: true, customers: [], allCustomers: [] }) this.setData({ selectedProject: e.detail.value, currentPage: 1, hasMore: true, customers: [], allCustomers: [] })
this.loadData() this.loadData()
}, },

View File

@@ -165,7 +165,7 @@
<text class="grid-val grid-val--lg">{{fmt.money(item.spend30d)}}</text> <text class="grid-val grid-val--lg">{{fmt.money(item.spend30d)}}</text>
</view> </view>
<view class="grid-cell"> <view class="grid-cell">
<text class="grid-label">月均到店</text> <text class="grid-label">近30天到店</text>
<text class="grid-val">{{fmt.count(item.avgVisits, '次')}}</text> <text class="grid-val">{{fmt.count(item.avgVisits, '次')}}</text>
</view> </view>
<view class="grid-cell"> <view class="grid-cell">
@@ -201,7 +201,7 @@
<text class="grid-val">{{fmt.safe(item.lastRecharge)}}</text> <text class="grid-val">{{fmt.safe(item.lastRecharge)}}</text>
</view> </view>
<view class="grid-cell"> <view class="grid-cell">
<text class="grid-label">充值</text> <text class="grid-label">累计充值</text>
<text class="grid-val grid-val--success">{{fmt.money(item.rechargeAmount)}}</text> <text class="grid-val grid-val--success">{{fmt.money(item.rechargeAmount)}}</text>
</view> </view>
<view class="grid-cell"> <view class="grid-cell">

View File

@@ -1248,7 +1248,7 @@ AI_CHANGELOG
/* CHANGE 2026-03-14 | intent: H5 gap-1=4px → spec p-1=8rpx */ /* CHANGE 2026-03-14 | intent: H5 gap-1=4px → spec p-1=8rpx */
.coach-fin-header { .coach-fin-header {
display: grid; display: grid;
grid-template-columns: 1fr 1.2fr 1.2fr 1fr; grid-template-columns: 1fr 1.5fr 1.5fr 1fr;
gap: 8rpx; gap: 8rpx;
padding: 14rpx 30rpx; padding: 14rpx 30rpx;
background: #f0f0f0; background: #f0f0f0;
@@ -1276,7 +1276,7 @@ AI_CHANGELOG
/* CHANGE 2026-03-14 | intent: H5 gap-1=4px → spec p-1=8rpx基础行 py-2=14rpx */ /* CHANGE 2026-03-14 | intent: H5 gap-1=4px → spec p-1=8rpx基础行 py-2=14rpx */
.coach-fin-row { .coach-fin-row {
display: grid; display: grid;
grid-template-columns: 1fr 1.2fr 1.2fr 1fr; grid-template-columns: 1fr 1.5fr 1.5fr 1fr;
gap: 8rpx; gap: 8rpx;
padding: 14rpx 30rpx; padding: 14rpx 30rpx;
border-bottom: 2rpx solid #f3f3f3; border-bottom: 2rpx solid #f3f3f3;
@@ -1306,7 +1306,7 @@ AI_CHANGELOG
/* CHANGE 2026-03-13 | intent: 校对 H5 coach-fin-bold fontSize:14px→24rpx(87.5%) */ /* CHANGE 2026-03-13 | intent: 校对 H5 coach-fin-bold fontSize:14px→24rpx(87.5%) */
.coach-fin-bold { .coach-fin-bold {
font-size: 26rpx; font-size: 30rpx;
line-height: 36rpx; line-height: 36rpx;
font-weight: 600; font-weight: 600;
color: #242424; color: #242424;

View File

@@ -0,0 +1,74 @@
-- 迁移:为 dws_member_winback_index 和 dws_member_newconv_index 添加 visits_30d 字段
-- Fix-3recall 维度需要 30 天到店次数,原先用 visits_14d 近似
-- 日期2026-04-07
-- 重要:加列后必须重建所有引用这两张表的视图,否则视图看不到新列
-- 1. 加列
ALTER TABLE dws.dws_member_winback_index
ADD COLUMN IF NOT EXISTS visits_30d integer DEFAULT 0 NOT NULL;
ALTER TABLE dws.dws_member_newconv_index
ADD COLUMN IF NOT EXISTS visits_30d integer DEFAULT 0 NOT NULL;
-- 2. 重建 app schema RLS 视图DROP + CREATE因为列顺序变化导致 CREATE OR REPLACE 报错)
DROP VIEW IF EXISTS app.v_dws_member_winback_index;
CREATE VIEW app.v_dws_member_winback_index AS
SELECT winback_id, site_id, tenant_id, member_id, status, segment,
member_create_time, first_visit_time, last_visit_time, last_recharge_time,
t_v, t_r, t_a,
visits_14d, visits_30d, visits_60d, visits_total,
spend_30d, spend_180d, sv_balance, recharge_60d_amt, interval_count,
overdue_old, drop_old, recharge_old, value_old,
raw_score, display_score, last_wechat_touch_time,
calc_time, calc_version, created_at, updated_at,
overdue_cdf_p, ideal_interval_days, ideal_next_visit_date
FROM dws.dws_member_winback_index
WHERE site_id = current_setting('app.current_site_id')::bigint;
DROP VIEW IF EXISTS app.v_dws_member_newconv_index;
CREATE VIEW app.v_dws_member_newconv_index AS
SELECT newconv_id, site_id, tenant_id, member_id, status, segment,
member_create_time, first_visit_time, last_visit_time, last_recharge_time,
t_v, t_r, t_a,
visits_14d, visits_30d, visits_60d, visits_total,
spend_30d, spend_180d, sv_balance, recharge_60d_amt, interval_count,
need_new, salvage_new, recharge_new, value_new, welcome_new,
raw_score_welcome, raw_score_convert, raw_score,
display_score_welcome, display_score_convert, display_score,
last_wechat_touch_time, calc_time, calc_version, created_at, updated_at
FROM dws.dws_member_newconv_index
WHERE site_id = current_setting('app.current_site_id')::bigint;
-- 3. 重建 dws.v_member_recall_priority (UNION ALL 视图)
DROP VIEW IF EXISTS dws.v_member_recall_priority;
CREATE VIEW dws.v_member_recall_priority AS
SELECT site_id, tenant_id, member_id, 'WBI'::varchar(10) AS index_type,
status, segment, member_create_time, first_visit_time, last_visit_time, last_recharge_time,
t_v, t_r, t_a, visits_14d, visits_30d, visits_60d, visits_total,
spend_30d, spend_180d, sv_balance, recharge_60d_amt,
NULL::numeric(10,4) AS need_new, NULL::numeric(10,4) AS salvage_new,
NULL::numeric(10,4) AS recharge_new, NULL::numeric(10,4) AS value_new,
NULL::numeric(10,4) AS welcome_new,
NULL::numeric(14,6) AS raw_score_welcome, NULL::numeric(14,6) AS raw_score_convert,
raw_score,
NULL::numeric(4,2) AS display_score_welcome, NULL::numeric(4,2) AS display_score_convert,
display_score, last_wechat_touch_time, calc_time
FROM dws.dws_member_winback_index
UNION ALL
SELECT site_id, tenant_id, member_id, 'NCI'::varchar(10) AS index_type,
status, segment, member_create_time, first_visit_time, last_visit_time, last_recharge_time,
t_v, t_r, t_a, visits_14d, visits_30d, visits_60d, visits_total,
spend_30d, spend_180d, sv_balance, recharge_60d_amt,
need_new, salvage_new, recharge_new, value_new, welcome_new,
raw_score_welcome, raw_score_convert, raw_score,
display_score_welcome, display_score_convert, display_score,
last_wechat_touch_time, calc_time
FROM dws.dws_member_newconv_index;
-- 4. 回滚脚本(如需回滚,按逆序执行):
-- DROP VIEW IF EXISTS dws.v_member_recall_priority;
-- DROP VIEW IF EXISTS app.v_dws_member_newconv_index;
-- DROP VIEW IF EXISTS app.v_dws_member_winback_index;
-- ALTER TABLE dws.dws_member_winback_index DROP COLUMN IF EXISTS visits_30d;
-- ALTER TABLE dws.dws_member_newconv_index DROP COLUMN IF EXISTS visits_30d;
-- 然后重建原始视图(不含 visits_30d

View File

@@ -0,0 +1,58 @@
-- 迁移:修复 app schema 快照视图多行膨胀
-- 背景consumption_summary 和 assistant_customer_stats 按 stat_date 存储每日快照,
-- 原视图返回全部行导致 JOIN 行膨胀和前端卡片重复。
-- 修复DISTINCT ON 只取每个唯一键的最新 stat_date 行。
-- 日期2026-04-08
-- 1. v_dws_member_consumption_summary每个 member_id 只取最新快照
CREATE OR REPLACE VIEW app.v_dws_member_consumption_summary AS
SELECT DISTINCT ON (member_id)
id, site_id, tenant_id, member_id, stat_date,
member_nickname, member_mobile, card_grade_name,
register_date, first_consume_date, last_consume_date,
total_visit_count, total_consume_amount, total_recharge_amount,
total_table_fee, total_goods_amount, total_assistant_amount,
visit_count_7d, visit_count_10d, visit_count_15d,
visit_count_30d, visit_count_60d, visit_count_90d,
consume_amount_7d, consume_amount_10d, consume_amount_15d,
consume_amount_30d, consume_amount_60d, consume_amount_90d,
cash_card_balance, gift_card_balance, total_card_balance,
days_since_last, is_active_7d, is_active_30d, is_active_90d,
customer_tier, created_at, updated_at,
recharge_count_30d, recharge_count_60d, recharge_count_90d,
recharge_amount_30d, recharge_amount_60d, recharge_amount_90d,
avg_ticket_amount
FROM dws.dws_member_consumption_summary
WHERE (site_id = (current_setting('app.current_site_id'::text))::bigint)
ORDER BY member_id, stat_date DESC;
-- 2. v_dws_assistant_customer_stats每对 (assistant_id, member_id) 只取最新快照
CREATE OR REPLACE VIEW app.v_dws_assistant_customer_stats AS
SELECT DISTINCT ON (assistant_id, member_id)
id, site_id, tenant_id, assistant_id, assistant_nickname,
member_id, member_nickname, member_mobile, stat_date,
first_service_date, last_service_date,
total_service_count, total_service_hours, total_service_amount,
service_count_7d, service_count_10d, service_count_15d,
service_count_30d, service_count_60d, service_count_90d,
service_hours_7d, service_hours_10d, service_hours_15d,
service_hours_30d, service_hours_60d, service_hours_90d,
service_amount_7d, service_amount_10d, service_amount_15d,
service_amount_30d, service_amount_60d, service_amount_90d,
days_since_last, is_active_7d, is_active_30d,
created_at, updated_at
FROM dws.dws_assistant_customer_stats
WHERE (site_id = (current_setting('app.current_site_id'::text))::bigint)
ORDER BY assistant_id, member_id, stat_date DESC;
-- 验证 SQL执行后检查:
-- SET app.current_site_id = '<site_id>';
-- 1) SELECT COUNT(*), COUNT(DISTINCT member_id) FROM app.v_dws_member_consumption_summary; -- 两个值应该相等
-- 2) SELECT COUNT(*), COUNT(DISTINCT (assistant_id, member_id)) FROM app.v_dws_assistant_customer_stats; -- 两个值应该相等
-- 3) SELECT member_id, COUNT(*) FROM app.v_dws_member_consumption_summary GROUP BY member_id HAVING COUNT(*) > 1; -- 应返回 0 行
-- 回滚:
-- CREATE OR REPLACE VIEW app.v_dws_member_consumption_summary AS
-- SELECT ... FROM dws.dws_member_consumption_summary WHERE (site_id = ...); -- 去掉 DISTINCT ON 和 ORDER BY
-- CREATE OR REPLACE VIEW app.v_dws_assistant_customer_stats AS
-- SELECT ... FROM dws.dws_assistant_customer_stats WHERE (site_id = ...); -- 同上

View File

@@ -439,8 +439,10 @@ SELECT table_fee_log_id,
WHERE (site_id = (current_setting('app.current_site_id'::text))::bigint); WHERE (site_id = (current_setting('app.current_site_id'::text))::bigint);
; ;
-- CHANGE 2026-04-08 | DISTINCT ON 只取每对 (assistant, member) 最新快照
CREATE OR REPLACE VIEW app.v_dws_assistant_customer_stats AS CREATE OR REPLACE VIEW app.v_dws_assistant_customer_stats AS
SELECT id, SELECT DISTINCT ON (assistant_id, member_id)
id,
site_id, site_id,
tenant_id, tenant_id,
assistant_id, assistant_id,
@@ -478,7 +480,8 @@ SELECT id,
created_at, created_at,
updated_at updated_at
FROM dws.dws_assistant_customer_stats FROM dws.dws_assistant_customer_stats
WHERE (site_id = (current_setting('app.current_site_id'::text))::bigint); WHERE (site_id = (current_setting('app.current_site_id'::text))::bigint)
ORDER BY assistant_id, member_id, stat_date DESC;
; ;
CREATE OR REPLACE VIEW app.v_dws_assistant_daily_detail AS CREATE OR REPLACE VIEW app.v_dws_assistant_daily_detail AS
@@ -976,8 +979,10 @@ SELECT relation_id,
WHERE (site_id = (current_setting('app.current_site_id'::text))::bigint); WHERE (site_id = (current_setting('app.current_site_id'::text))::bigint);
; ;
-- CHANGE 2026-04-08 | DISTINCT ON 只取每个会员最新快照,避免多 stat_date 行膨胀
CREATE OR REPLACE VIEW app.v_dws_member_consumption_summary AS CREATE OR REPLACE VIEW app.v_dws_member_consumption_summary AS
SELECT id, SELECT DISTINCT ON (member_id)
id,
site_id, site_id,
tenant_id, tenant_id,
member_id, member_id,
@@ -1024,7 +1029,8 @@ SELECT id,
recharge_amount_90d, recharge_amount_90d,
avg_ticket_amount avg_ticket_amount
FROM dws.dws_member_consumption_summary FROM dws.dws_member_consumption_summary
WHERE (site_id = (current_setting('app.current_site_id'::text))::bigint); WHERE (site_id = (current_setting('app.current_site_id'::text))::bigint)
ORDER BY member_id, stat_date DESC;
; ;
CREATE OR REPLACE VIEW app.v_dws_member_newconv_index AS CREATE OR REPLACE VIEW app.v_dws_member_newconv_index AS
@@ -1042,6 +1048,7 @@ SELECT newconv_id,
t_r, t_r,
t_a, t_a,
visits_14d, visits_14d,
visits_30d,
visits_60d, visits_60d,
visits_total, visits_total,
spend_30d, spend_30d,
@@ -1167,6 +1174,7 @@ SELECT winback_id,
t_r, t_r,
t_a, t_a,
visits_14d, visits_14d,
visits_30d,
visits_60d, visits_60d,
visits_total, visits_total,
spend_30d, spend_30d,

View File

@@ -789,6 +789,7 @@ CREATE TABLE dws.dws_member_newconv_index (
t_r numeric(6,2), t_r numeric(6,2),
t_a numeric(6,2), t_a numeric(6,2),
visits_14d integer DEFAULT 0 NOT NULL, visits_14d integer DEFAULT 0 NOT NULL,
visits_30d integer DEFAULT 0 NOT NULL,
visits_60d integer DEFAULT 0 NOT NULL, visits_60d integer DEFAULT 0 NOT NULL,
visits_total integer DEFAULT 0 NOT NULL, visits_total integer DEFAULT 0 NOT NULL,
spend_30d numeric(14,2) DEFAULT 0 NOT NULL, spend_30d numeric(14,2) DEFAULT 0 NOT NULL,
@@ -907,6 +908,7 @@ CREATE TABLE dws.dws_member_winback_index (
t_r numeric(6,2), t_r numeric(6,2),
t_a numeric(6,2), t_a numeric(6,2),
visits_14d integer DEFAULT 0 NOT NULL, visits_14d integer DEFAULT 0 NOT NULL,
visits_30d integer DEFAULT 0 NOT NULL,
visits_60d integer DEFAULT 0 NOT NULL, visits_60d integer DEFAULT 0 NOT NULL,
visits_total integer DEFAULT 0 NOT NULL, visits_total integer DEFAULT 0 NOT NULL,
spend_30d numeric(14,2) DEFAULT 0 NOT NULL, spend_30d numeric(14,2) DEFAULT 0 NOT NULL,
@@ -1297,6 +1299,7 @@ SELECT dws_member_winback_index.site_id,
dws_member_winback_index.t_r, dws_member_winback_index.t_r,
dws_member_winback_index.t_a, dws_member_winback_index.t_a,
dws_member_winback_index.visits_14d, dws_member_winback_index.visits_14d,
dws_member_winback_index.visits_30d,
dws_member_winback_index.visits_60d, dws_member_winback_index.visits_60d,
dws_member_winback_index.visits_total, dws_member_winback_index.visits_total,
dws_member_winback_index.spend_30d, dws_member_winback_index.spend_30d,
@@ -1332,6 +1335,7 @@ UNION ALL
dws_member_newconv_index.t_r, dws_member_newconv_index.t_r,
dws_member_newconv_index.t_a, dws_member_newconv_index.t_a,
dws_member_newconv_index.visits_14d, dws_member_newconv_index.visits_14d,
dws_member_newconv_index.visits_30d,
dws_member_newconv_index.visits_60d, dws_member_newconv_index.visits_60d,
dws_member_newconv_index.visits_total, dws_member_newconv_index.visits_total,
dws_member_newconv_index.spend_30d, dws_member_newconv_index.spend_30d,

View File

@@ -0,0 +1,11 @@
-- Migration: add completion_type column to coach_tasks.
-- Fix-13: distinguish automatic completion ("auto", system detected the
-- member's return visit) from manual completion ("manual", marked by the
-- assistant). Nullable so existing rows are unaffected.
-- Date: 2026-04-07
ALTER TABLE biz.coach_tasks
ADD COLUMN IF NOT EXISTS completion_type character varying(10);
COMMENT ON COLUMN biz.coach_tasks.completion_type IS '完成类型auto=系统自动检测到店, manual=助教手动标记';
-- Rollback:
-- ALTER TABLE biz.coach_tasks DROP COLUMN IF EXISTS completion_type;

View File

@@ -16,6 +16,7 @@ CREATE SEQUENCE IF NOT EXISTS biz.cfg_task_generator_params_id_seq AS bigint;
CREATE SEQUENCE IF NOT EXISTS biz.coach_task_history_id_seq AS bigint; CREATE SEQUENCE IF NOT EXISTS biz.coach_task_history_id_seq AS bigint;
CREATE SEQUENCE IF NOT EXISTS biz.coach_task_transfer_log_id_seq AS bigint; CREATE SEQUENCE IF NOT EXISTS biz.coach_task_transfer_log_id_seq AS bigint;
CREATE SEQUENCE IF NOT EXISTS biz.coach_tasks_id_seq AS bigint; CREATE SEQUENCE IF NOT EXISTS biz.coach_tasks_id_seq AS bigint;
CREATE SEQUENCE IF NOT EXISTS biz.recall_events_id_seq AS bigint;
CREATE SEQUENCE IF NOT EXISTS biz.connectors_id_seq AS integer; CREATE SEQUENCE IF NOT EXISTS biz.connectors_id_seq AS integer;
CREATE SEQUENCE IF NOT EXISTS biz.dws_assistant_task_monthly_id_seq AS bigint; CREATE SEQUENCE IF NOT EXISTS biz.dws_assistant_task_monthly_id_seq AS bigint;
CREATE SEQUENCE IF NOT EXISTS biz.excel_upload_log_id_seq AS bigint; CREATE SEQUENCE IF NOT EXISTS biz.excel_upload_log_id_seq AS bigint;
@@ -153,6 +154,7 @@ CREATE TABLE biz.coach_tasks (
abandon_reason text, abandon_reason text,
completed_at timestamp with time zone, completed_at timestamp with time zone,
completed_task_type character varying(50), completed_task_type character varying(50),
completion_type character varying(10),
parent_task_id bigint, parent_task_id bigint,
created_at timestamp with time zone DEFAULT now(), created_at timestamp with time zone DEFAULT now(),
updated_at timestamp with time zone DEFAULT now(), updated_at timestamp with time zone DEFAULT now(),
@@ -161,6 +163,17 @@ CREATE TABLE biz.coach_tasks (
transferred_at timestamp with time zone transferred_at timestamp with time zone
); );
-- Recall events (Fix-13 "broad recall"): one row per detected return visit of
-- an associated member, evidenced by a settlement record. task_id/task_type
-- link the visit to the coach task it completed when one existed; they are
-- nullable because a visit counts even with no pre-existing task.
CREATE TABLE biz.recall_events (
id bigint DEFAULT nextval('biz.recall_events_id_seq'::regclass) NOT NULL,
site_id bigint NOT NULL,
assistant_id bigint NOT NULL,
member_id bigint NOT NULL,
pay_time timestamp with time zone NOT NULL,
task_id bigint,
task_type character varying(50),
created_at timestamp with time zone DEFAULT now()
);
CREATE TABLE biz.connectors ( CREATE TABLE biz.connectors (
id integer DEFAULT nextval('biz.connectors_id_seq'::regclass) NOT NULL, id integer DEFAULT nextval('biz.connectors_id_seq'::regclass) NOT NULL,
connector_key character varying(50) NOT NULL, connector_key character varying(50) NOT NULL,
@@ -341,6 +354,8 @@ ALTER TABLE biz.dws_assistant_task_monthly ADD CONSTRAINT dws_assistant_task_mon
ALTER TABLE biz.excel_upload_log ADD CONSTRAINT excel_upload_log_pkey PRIMARY KEY (id); ALTER TABLE biz.excel_upload_log ADD CONSTRAINT excel_upload_log_pkey PRIMARY KEY (id);
ALTER TABLE biz.notes ADD CONSTRAINT notes_task_id_fkey FOREIGN KEY (task_id) REFERENCES biz.coach_tasks(id); ALTER TABLE biz.notes ADD CONSTRAINT notes_task_id_fkey FOREIGN KEY (task_id) REFERENCES biz.coach_tasks(id);
ALTER TABLE biz.notes ADD CONSTRAINT notes_pkey PRIMARY KEY (id); ALTER TABLE biz.notes ADD CONSTRAINT notes_pkey PRIMARY KEY (id);
ALTER TABLE biz.recall_events ADD CONSTRAINT recall_events_pkey PRIMARY KEY (id);
ALTER TABLE biz.recall_events ADD CONSTRAINT recall_events_task_id_fkey FOREIGN KEY (task_id) REFERENCES biz.coach_tasks(id);
ALTER TABLE biz.salary_adjustments ADD CONSTRAINT salary_adjustments_upload_batch_id_fkey FOREIGN KEY (upload_batch_id) REFERENCES biz.excel_upload_log(id); ALTER TABLE biz.salary_adjustments ADD CONSTRAINT salary_adjustments_upload_batch_id_fkey FOREIGN KEY (upload_batch_id) REFERENCES biz.excel_upload_log(id);
ALTER TABLE biz.salary_adjustments ADD CONSTRAINT salary_adjustments_pkey PRIMARY KEY (id); ALTER TABLE biz.salary_adjustments ADD CONSTRAINT salary_adjustments_pkey PRIMARY KEY (id);
ALTER TABLE biz.site_code_history ADD CONSTRAINT site_code_history_pkey PRIMARY KEY (id); ALTER TABLE biz.site_code_history ADD CONSTRAINT site_code_history_pkey PRIMARY KEY (id);
@@ -386,5 +401,7 @@ CREATE INDEX idx_task_monthly_site_month ON biz.dws_assistant_task_monthly USING
CREATE INDEX idx_excel_log_site ON biz.excel_upload_log USING btree (site_id, created_at DESC); CREATE INDEX idx_excel_log_site ON biz.excel_upload_log USING btree (site_id, created_at DESC);
CREATE INDEX idx_notes_target ON biz.notes USING btree (site_id, target_type, target_id); CREATE INDEX idx_notes_target ON biz.notes USING btree (site_id, target_type, target_id);
CREATE INDEX idx_salary_adj_assistant_month ON biz.salary_adjustments USING btree (assistant_id, salary_month); CREATE INDEX idx_salary_adj_assistant_month ON biz.salary_adjustments USING btree (assistant_id, salary_month);
CREATE UNIQUE INDEX idx_recall_events_site_assistant_member_day ON biz.recall_events USING btree (site_id, assistant_id, member_id, (date_trunc('day', pay_time AT TIME ZONE 'Asia/Shanghai')));
CREATE INDEX idx_recall_events_assistant_pay ON biz.recall_events USING btree (site_id, assistant_id, pay_time);
CREATE INDEX idx_salary_adj_site_month ON biz.salary_adjustments USING btree (site_id, salary_month); CREATE INDEX idx_salary_adj_site_month ON biz.salary_adjustments USING btree (site_id, salary_month);

View File

@@ -1,11 +1,12 @@
# 审计一览表 # 审计一览表
> 自动生成于 2026-04-06 01:04:43,请勿手动编辑。 > 自动生成于 2026-04-08 15:09:30,请勿手动编辑。
## 时间线视图 ## 时间线视图
| 日期 | 项目 | 需求摘要 | 变更类型 | 影响模块 | 风险 | 详情 | | 日期 | 项目 | 需求摘要 | 变更类型 | 影响模块 | 风险 | 详情 |
|------|------|----------|----------|----------|------|------| |------|------|----------|----------|----------|------|------|
| 2026-04-08 | 项目级 | 变更审计记录Fix-13 回滚手动完成 + 广义召回完成机制 | bugfix | 其他 | 低 | [链接](changes/2026-04-08__fix13-recall-events-refactor.md) |
| 2026-04-06 | 项目级 | 变更审计记录v1 历史清理与 DDL 合并归档 | 清理 | 其他 | 极低 | [链接](changes/2026-04-06__v1-cleanup-ddl-consolidation.md) | | 2026-04-06 | 项目级 | 变更审计记录v1 历史清理与 DDL 合并归档 | 清理 | 其他 | 极低 | [链接](changes/2026-04-06__v1-cleanup-ddl-consolidation.md) |
| 2026-04-05 | 项目级 | 变更审计记录Kiro → Claude Code 全量迁移 | 文档 | 其他 | 低 | [链接](changes/2026-04-05__kiro-to-claude-code-migration.md) | | 2026-04-05 | 项目级 | 变更审计记录Kiro → Claude Code 全量迁移 | 文档 | 其他 | 低 | [链接](changes/2026-04-05__kiro-to-claude-code-migration.md) |
| 2026-03-31 | 项目级 | 变更审计记录:任务引擎改造 — 参数调优 + 客户级升级/转移 + 任务统计写入 | 功能 | 其他 | 未知 | [链接](changes/2026-03-31__task-engine-overhaul.md) | | 2026-03-31 | 项目级 | 变更审计记录:任务引擎改造 — 参数调优 + 客户级升级/转移 + 任务统计写入 | 功能 | 其他 | 未知 | [链接](changes/2026-03-31__task-engine-overhaul.md) |
@@ -240,6 +241,7 @@
| 日期 | 需求摘要 | 变更类型 | 影响模块 | 风险 | 详情 | | 日期 | 需求摘要 | 变更类型 | 影响模块 | 风险 | 详情 |
|------|----------|----------|----------|------|------| |------|----------|----------|----------|------|------|
| 2026-04-08 | 变更审计记录Fix-13 回滚手动完成 + 广义召回完成机制 | bugfix | 其他 | 低 | [链接](changes/2026-04-08__fix13-recall-events-refactor.md) |
| 2026-04-06 | 变更审计记录v1 历史清理与 DDL 合并归档 | 清理 | 其他 | 极低 | [链接](changes/2026-04-06__v1-cleanup-ddl-consolidation.md) | | 2026-04-06 | 变更审计记录v1 历史清理与 DDL 合并归档 | 清理 | 其他 | 极低 | [链接](changes/2026-04-06__v1-cleanup-ddl-consolidation.md) |
| 2026-04-05 | 变更审计记录Kiro → Claude Code 全量迁移 | 文档 | 其他 | 低 | [链接](changes/2026-04-05__kiro-to-claude-code-migration.md) | | 2026-04-05 | 变更审计记录Kiro → Claude Code 全量迁移 | 文档 | 其他 | 低 | [链接](changes/2026-04-05__kiro-to-claude-code-migration.md) |
| 2026-03-31 | 变更审计记录:任务引擎改造 — 参数调优 + 客户级升级/转移 + 任务统计写入 | 功能 | 其他 | 未知 | [链接](changes/2026-03-31__task-engine-overhaul.md) | | 2026-03-31 | 变更审计记录:任务引擎改造 — 参数调优 + 客户级升级/转移 + 任务统计写入 | 功能 | 其他 | 未知 | [链接](changes/2026-03-31__task-engine-overhaul.md) |
@@ -370,6 +372,7 @@
| 日期 | 需求摘要 | 变更类型 | 风险 | 详情 | | 日期 | 需求摘要 | 变更类型 | 风险 | 详情 |
|------|----------|----------|------|------| |------|----------|----------|------|------|
| 2026-04-08 | 变更审计记录Fix-13 回滚手动完成 + 广义召回完成机制 | bugfix | 低 | [链接](changes/2026-04-08__fix13-recall-events-refactor.md) |
| 2026-04-06 | 变更审计记录v1 历史清理与 DDL 合并归档 | 清理 | 极低 | [链接](changes/2026-04-06__v1-cleanup-ddl-consolidation.md) | | 2026-04-06 | 变更审计记录v1 历史清理与 DDL 合并归档 | 清理 | 极低 | [链接](changes/2026-04-06__v1-cleanup-ddl-consolidation.md) |
| 2026-04-05 | 变更审计记录Kiro → Claude Code 全量迁移 | 文档 | 低 | [链接](changes/2026-04-05__kiro-to-claude-code-migration.md) | | 2026-04-05 | 变更审计记录Kiro → Claude Code 全量迁移 | 文档 | 低 | [链接](changes/2026-04-05__kiro-to-claude-code-migration.md) |
| 2026-03-31 | 变更审计记录:任务引擎改造 — 参数调优 + 客户级升级/转移 + 任务统计写入 | 功能 | 未知 | [链接](changes/2026-03-31__task-engine-overhaul.md) | | 2026-03-31 | 变更审计记录:任务引擎改造 — 参数调优 + 客户级升级/转移 + 任务统计写入 | 功能 | 未知 | [链接](changes/2026-03-31__task-engine-overhaul.md) |

View File

@@ -439,8 +439,10 @@ SELECT table_fee_log_id,
WHERE (site_id = (current_setting('app.current_site_id'::text))::bigint); WHERE (site_id = (current_setting('app.current_site_id'::text))::bigint);
; ;
-- CHANGE 2026-04-08 | DISTINCT ON 只取每对 (assistant, member) 最新快照
CREATE OR REPLACE VIEW app.v_dws_assistant_customer_stats AS CREATE OR REPLACE VIEW app.v_dws_assistant_customer_stats AS
SELECT id, SELECT DISTINCT ON (assistant_id, member_id)
id,
site_id, site_id,
tenant_id, tenant_id,
assistant_id, assistant_id,
@@ -478,7 +480,8 @@ SELECT id,
created_at, created_at,
updated_at updated_at
FROM dws.dws_assistant_customer_stats FROM dws.dws_assistant_customer_stats
WHERE (site_id = (current_setting('app.current_site_id'::text))::bigint); WHERE (site_id = (current_setting('app.current_site_id'::text))::bigint)
ORDER BY assistant_id, member_id, stat_date DESC;
; ;
CREATE OR REPLACE VIEW app.v_dws_assistant_daily_detail AS CREATE OR REPLACE VIEW app.v_dws_assistant_daily_detail AS
@@ -976,8 +979,10 @@ SELECT relation_id,
WHERE (site_id = (current_setting('app.current_site_id'::text))::bigint); WHERE (site_id = (current_setting('app.current_site_id'::text))::bigint);
; ;
-- CHANGE 2026-04-08 | DISTINCT ON 只取每个会员最新快照,避免多 stat_date 行膨胀
CREATE OR REPLACE VIEW app.v_dws_member_consumption_summary AS CREATE OR REPLACE VIEW app.v_dws_member_consumption_summary AS
SELECT id, SELECT DISTINCT ON (member_id)
id,
site_id, site_id,
tenant_id, tenant_id,
member_id, member_id,
@@ -1024,7 +1029,8 @@ SELECT id,
recharge_amount_90d, recharge_amount_90d,
avg_ticket_amount avg_ticket_amount
FROM dws.dws_member_consumption_summary FROM dws.dws_member_consumption_summary
WHERE (site_id = (current_setting('app.current_site_id'::text))::bigint); WHERE (site_id = (current_setting('app.current_site_id'::text))::bigint)
ORDER BY member_id, stat_date DESC;
; ;
CREATE OR REPLACE VIEW app.v_dws_member_newconv_index AS CREATE OR REPLACE VIEW app.v_dws_member_newconv_index AS
@@ -1042,6 +1048,7 @@ SELECT newconv_id,
t_r, t_r,
t_a, t_a,
visits_14d, visits_14d,
visits_30d,
visits_60d, visits_60d,
visits_total, visits_total,
spend_30d, spend_30d,
@@ -1167,6 +1174,7 @@ SELECT winback_id,
t_r, t_r,
t_a, t_a,
visits_14d, visits_14d,
visits_30d,
visits_60d, visits_60d,
visits_total, visits_total,
spend_30d, spend_30d,

View File

@@ -789,6 +789,7 @@ CREATE TABLE dws.dws_member_newconv_index (
t_r numeric(6,2), t_r numeric(6,2),
t_a numeric(6,2), t_a numeric(6,2),
visits_14d integer DEFAULT 0 NOT NULL, visits_14d integer DEFAULT 0 NOT NULL,
visits_30d integer DEFAULT 0 NOT NULL,
visits_60d integer DEFAULT 0 NOT NULL, visits_60d integer DEFAULT 0 NOT NULL,
visits_total integer DEFAULT 0 NOT NULL, visits_total integer DEFAULT 0 NOT NULL,
spend_30d numeric(14,2) DEFAULT 0 NOT NULL, spend_30d numeric(14,2) DEFAULT 0 NOT NULL,
@@ -907,6 +908,7 @@ CREATE TABLE dws.dws_member_winback_index (
t_r numeric(6,2), t_r numeric(6,2),
t_a numeric(6,2), t_a numeric(6,2),
visits_14d integer DEFAULT 0 NOT NULL, visits_14d integer DEFAULT 0 NOT NULL,
visits_30d integer DEFAULT 0 NOT NULL,
visits_60d integer DEFAULT 0 NOT NULL, visits_60d integer DEFAULT 0 NOT NULL,
visits_total integer DEFAULT 0 NOT NULL, visits_total integer DEFAULT 0 NOT NULL,
spend_30d numeric(14,2) DEFAULT 0 NOT NULL, spend_30d numeric(14,2) DEFAULT 0 NOT NULL,
@@ -1297,6 +1299,7 @@ SELECT dws_member_winback_index.site_id,
dws_member_winback_index.t_r, dws_member_winback_index.t_r,
dws_member_winback_index.t_a, dws_member_winback_index.t_a,
dws_member_winback_index.visits_14d, dws_member_winback_index.visits_14d,
dws_member_winback_index.visits_30d,
dws_member_winback_index.visits_60d, dws_member_winback_index.visits_60d,
dws_member_winback_index.visits_total, dws_member_winback_index.visits_total,
dws_member_winback_index.spend_30d, dws_member_winback_index.spend_30d,
@@ -1332,6 +1335,7 @@ UNION ALL
dws_member_newconv_index.t_r, dws_member_newconv_index.t_r,
dws_member_newconv_index.t_a, dws_member_newconv_index.t_a,
dws_member_newconv_index.visits_14d, dws_member_newconv_index.visits_14d,
dws_member_newconv_index.visits_30d,
dws_member_newconv_index.visits_60d, dws_member_newconv_index.visits_60d,
dws_member_newconv_index.visits_total, dws_member_newconv_index.visits_total,
dws_member_newconv_index.spend_30d, dws_member_newconv_index.spend_30d,

View File

@@ -16,6 +16,7 @@ CREATE SEQUENCE IF NOT EXISTS biz.cfg_task_generator_params_id_seq AS bigint;
CREATE SEQUENCE IF NOT EXISTS biz.coach_task_history_id_seq AS bigint; CREATE SEQUENCE IF NOT EXISTS biz.coach_task_history_id_seq AS bigint;
CREATE SEQUENCE IF NOT EXISTS biz.coach_task_transfer_log_id_seq AS bigint; CREATE SEQUENCE IF NOT EXISTS biz.coach_task_transfer_log_id_seq AS bigint;
CREATE SEQUENCE IF NOT EXISTS biz.coach_tasks_id_seq AS bigint; CREATE SEQUENCE IF NOT EXISTS biz.coach_tasks_id_seq AS bigint;
CREATE SEQUENCE IF NOT EXISTS biz.recall_events_id_seq AS bigint;
CREATE SEQUENCE IF NOT EXISTS biz.connectors_id_seq AS integer; CREATE SEQUENCE IF NOT EXISTS biz.connectors_id_seq AS integer;
CREATE SEQUENCE IF NOT EXISTS biz.dws_assistant_task_monthly_id_seq AS bigint; CREATE SEQUENCE IF NOT EXISTS biz.dws_assistant_task_monthly_id_seq AS bigint;
CREATE SEQUENCE IF NOT EXISTS biz.excel_upload_log_id_seq AS bigint; CREATE SEQUENCE IF NOT EXISTS biz.excel_upload_log_id_seq AS bigint;
@@ -153,6 +154,7 @@ CREATE TABLE biz.coach_tasks (
abandon_reason text, abandon_reason text,
completed_at timestamp with time zone, completed_at timestamp with time zone,
completed_task_type character varying(50), completed_task_type character varying(50),
completion_type character varying(10),
parent_task_id bigint, parent_task_id bigint,
created_at timestamp with time zone DEFAULT now(), created_at timestamp with time zone DEFAULT now(),
updated_at timestamp with time zone DEFAULT now(), updated_at timestamp with time zone DEFAULT now(),
@@ -161,6 +163,17 @@ CREATE TABLE biz.coach_tasks (
transferred_at timestamp with time zone transferred_at timestamp with time zone
); );
CREATE TABLE biz.recall_events (
id bigint DEFAULT nextval('biz.recall_events_id_seq'::regclass) NOT NULL,
site_id bigint NOT NULL,
assistant_id bigint NOT NULL,
member_id bigint NOT NULL,
pay_time timestamp with time zone NOT NULL,
task_id bigint,
task_type character varying(50),
created_at timestamp with time zone DEFAULT now()
);
CREATE TABLE biz.connectors ( CREATE TABLE biz.connectors (
id integer DEFAULT nextval('biz.connectors_id_seq'::regclass) NOT NULL, id integer DEFAULT nextval('biz.connectors_id_seq'::regclass) NOT NULL,
connector_key character varying(50) NOT NULL, connector_key character varying(50) NOT NULL,
@@ -341,6 +354,8 @@ ALTER TABLE biz.dws_assistant_task_monthly ADD CONSTRAINT dws_assistant_task_mon
ALTER TABLE biz.excel_upload_log ADD CONSTRAINT excel_upload_log_pkey PRIMARY KEY (id); ALTER TABLE biz.excel_upload_log ADD CONSTRAINT excel_upload_log_pkey PRIMARY KEY (id);
ALTER TABLE biz.notes ADD CONSTRAINT notes_task_id_fkey FOREIGN KEY (task_id) REFERENCES biz.coach_tasks(id); ALTER TABLE biz.notes ADD CONSTRAINT notes_task_id_fkey FOREIGN KEY (task_id) REFERENCES biz.coach_tasks(id);
ALTER TABLE biz.notes ADD CONSTRAINT notes_pkey PRIMARY KEY (id); ALTER TABLE biz.notes ADD CONSTRAINT notes_pkey PRIMARY KEY (id);
ALTER TABLE biz.recall_events ADD CONSTRAINT recall_events_pkey PRIMARY KEY (id);
ALTER TABLE biz.recall_events ADD CONSTRAINT recall_events_task_id_fkey FOREIGN KEY (task_id) REFERENCES biz.coach_tasks(id);
ALTER TABLE biz.salary_adjustments ADD CONSTRAINT salary_adjustments_upload_batch_id_fkey FOREIGN KEY (upload_batch_id) REFERENCES biz.excel_upload_log(id); ALTER TABLE biz.salary_adjustments ADD CONSTRAINT salary_adjustments_upload_batch_id_fkey FOREIGN KEY (upload_batch_id) REFERENCES biz.excel_upload_log(id);
ALTER TABLE biz.salary_adjustments ADD CONSTRAINT salary_adjustments_pkey PRIMARY KEY (id); ALTER TABLE biz.salary_adjustments ADD CONSTRAINT salary_adjustments_pkey PRIMARY KEY (id);
ALTER TABLE biz.site_code_history ADD CONSTRAINT site_code_history_pkey PRIMARY KEY (id); ALTER TABLE biz.site_code_history ADD CONSTRAINT site_code_history_pkey PRIMARY KEY (id);
@@ -386,5 +401,7 @@ CREATE INDEX idx_task_monthly_site_month ON biz.dws_assistant_task_monthly USING
CREATE INDEX idx_excel_log_site ON biz.excel_upload_log USING btree (site_id, created_at DESC); CREATE INDEX idx_excel_log_site ON biz.excel_upload_log USING btree (site_id, created_at DESC);
CREATE INDEX idx_notes_target ON biz.notes USING btree (site_id, target_type, target_id); CREATE INDEX idx_notes_target ON biz.notes USING btree (site_id, target_type, target_id);
CREATE INDEX idx_salary_adj_assistant_month ON biz.salary_adjustments USING btree (assistant_id, salary_month); CREATE INDEX idx_salary_adj_assistant_month ON biz.salary_adjustments USING btree (assistant_id, salary_month);
CREATE UNIQUE INDEX idx_recall_events_site_assistant_member_day ON biz.recall_events USING btree (site_id, assistant_id, member_id, (date_trunc('day', pay_time AT TIME ZONE 'Asia/Shanghai')));
CREATE INDEX idx_recall_events_assistant_pay ON biz.recall_events USING btree (site_id, assistant_id, pay_time);
CREATE INDEX idx_salary_adj_site_month ON biz.salary_adjustments USING btree (site_id, salary_month); CREATE INDEX idx_salary_adj_site_month ON biz.salary_adjustments USING btree (site_id, salary_month);

View File

@@ -0,0 +1,242 @@
# 看板全面排查修复计划
> 排查日期2026-04-07
> 范围客户看板BOARD-28 维度 + 助教看板BOARD-14 维度
> 状态:待实施(新对话执行)
---
## 一、已完成的修复
| # | 问题 | 修复内容 | 文件 |
|---|------|----------|------|
| ✅ | SPI 消费口径用 `pay_amount` 而非 `items_sum` | 3 处 settlement_head 查询改为 `items_sum`DWD 规则 #1 | `apps/etl/connectors/feiqiu/tasks/dws/index/spending_power_index_task.py` |
| ✅ | 潜力标签阈值 60 但数据范围 0-10 | 阈值 60→6返回 `[{text, theme}]` 对象数组 | `apps/backend/app/services/fdw_queries.py` `_derive_potential_tags` |
---
## 二、待实施修复(共 10 项)
### P1 — 功能性 Bug
#### Fix-1客户看板项目筛选崩溃C-1
- **问题**`_project_filter_clause()` 硬编码 `vd.member_id`,但 6/8 维度主表别名不是 `vd`,选择项目筛选时后端 SQL 500
- **影响维度**recall / balance / recharge / spend60 / freq60 / loyal
- **不受影响**recent别名 `vd`、potential自写子查询
- **修复方案**:统一改为独立子查询模式 `member_id IN (SELECT member_id FROM app.v_dws_member_project_tag WHERE category_code = %s AND is_tagged = true)`,不依赖外层别名(参考 potential 维度已有写法)
- **文件**`apps/backend/app/services/fdw_queries.py``_project_filter_clause()` 函数及 6 个维度查询函数
#### Fix-2助教看板 task 维度 callback 映射错误A-3
- **问题**callback 统计映射到 `relationship_building`,但业务上"回访"应对应 `follow_up_visit`
- **修复**`board_service.py` `_query_coach_tasks()` 中 callback 的 task_type 从 `relationship_building` 改为 `follow_up_visit`
- **文件**`apps/backend/app/services/board_service.py`
### P2 — 数据口径错误
#### Fix-3recall 维度 visits_30d 实为 14dC-2
- **问题**`dws_member_winback_index``visits_30d` 字段,后端用 `visits_14d` 近似,前端显示"30天到店"
- **修复**
1. DDL`dws.dws_member_winback_index` 加列 `visits_30d INTEGER DEFAULT 0`
2. ETLWBI 计算任务增加 30 天到店次数统计
3. RLS 视图:`dws.v_dws_member_winback_index``app.v_dws_member_winback_index` 加新列
4. 后端:查询改用新字段
5. DDL 文档同步更新
- **文件**
- `db/etl_feiqiu/schemas/dws.sql`DDL
- `apps/etl/connectors/feiqiu/tasks/dws/index/winback_index_task.py`ETL
- `db/etl_feiqiu/schemas/app.sql`RLS 视图)
- `apps/backend/app/services/fdw_queries.py`(查询)
- `docs/database/ddl/etl_feiqiu__dws.sql` / `etl_feiqiu__app.sql`DDL 文档)
#### Fix-4balance 维度月均消耗和可用月数偏差 2 倍C-3 + C-4
- **问题**
- `monthlyConsume`:直接用 `consume_amount_60d`60天总额标签"月均消耗"应为 `consume_amount_60d / 2`
- `availableMonths``balance / consume_amount_60d`,应为 `balance / (consume_amount_60d / 2)``2 * balance / consume_amount_60d`
- **示例**:余额 49780 / 60天消费 14521 → 当前显示 3.4 个月,实际应 6.9 个月
- **文件**`apps/backend/app/services/fdw_queries.py``get_customer_board_balance()`
#### Fix-5freq60 柱状图数据源不一致C-7
- **问题**:汇总数据来自 `v_dws_member_consumption_summary`(消费维度),但 8 周柱状图来自 `v_dwd_assistant_service_log`(助教服务维度),口径不一致
- **修复**:柱状图改为从消费汇总或结算维度获取周数据,与汇总口径一致
- **文件**`apps/backend/app/services/fdw_queries.py` — freq60 相关查询
#### Fix-6助教看板 sv 维度不响应时间筛选A-4
- **问题**:函数接收 `start_date/end_date` 但 SQL 未使用,"消耗"始终是固定 60 天
- **修复**`sv_consume` 的查询加入 `start_date/end_date` 过滤,使其随时间筛选联动
- **文件**`apps/backend/app/services/fdw_queries.py``get_coach_sv_data()`
### P3 — 标签/文案修正
#### Fix-7loyal 维度标签"近60天"→"近90天"
- **问题**ETL 关系指数实际使用 90 天窗口(`lookback_days: 90`),非 60 天
- **修复**:下拉选项文本 `最专一 近60天` 改为 `最专一 近90天`
- **文件**`apps/miniprogram/miniprogram/pages/board-customer/board-customer.ts``DIMENSION_OPTIONS`
#### Fix-8potential 维度"月均到店"→"近30天到店"
- **问题**:实际是 30 天到店天数(同一天多次只算一天),非"月均到店次数"
- **位置**board-customer 页面 → 最大消费潜力 tab → 4 列网格第 2 格
- **文件**`apps/miniprogram/miniprogram/pages/board-customer/board-customer.wxml`
#### Fix-9recharge 维度"充值"→"累计充值"
- **问题**:实际是 `SUM(pay_amount)` 历史累计充值总额,标签仅"充值"易误解为单次
- **位置**board-customer 页面 → 最近充值 tab → 4 列网格第 2 格
- **文件**`apps/miniprogram/miniprogram/pages/board-customer/board-customer.wxml`
#### Fix-10freq60 维度标签确认
- 此维度标签"最频繁 近60天"与实际口径一致(`visit_count_60d`),无需修改
---
## 三、DWD 合规检查结果(通过)
| 检查项 | 客户看板 | 助教看板 |
|--------|----------|----------|
| consume_money 禁止直接使用 | ✅ 全部走 DWS items_sum | ✅ |
| settle_type IN (1,3) | ✅ 不直查结算表 | ✅ |
| DQ-6 会员姓名通过 dim_member | ✅ | ✅ |
| DQ-7 会员卡通过 dim_member_card_account | ✅ | ✅ |
| 助教费用拆分 pd/cx | N/A | ✅ |
---
## 四、已知但不修的项
| 项 | 原因 |
|----|------|
| A-2sv 维度客户重复计算 | 业务定义确认:每个助教看"我的客户总余额",允许跨助教重叠,作为催促消耗的参考值 |
| A-5后端 `dim_type` 冗余 | 功能无影响,两端映射一致 |
---
## 五、recall_detector 修复 + 任务统计需求(同期实施)
> 需求确认日期2026-04-07
### 背景
`biz.coach_tasks` 当前 255 条记录completed = 0。排查确认
- 系统完成逻辑设计正确(`recall_detector.py` 匹配服务记录→标记 completed
- 但两个技术问题导致完成检测不可靠
- 缺少"助教主动标记完成"能力
- 任务完成统计需要三个维度
### 当前架构(调研结果)
| 服务 | 文件 | 职责 | 调度方式 |
|------|------|------|----------|
| `recall_detector` | `app/services/recall_detector.py` | 检测客户到店→标记召回任务完成→生成回访任务 | event: `etl_data_updated`(当前无人触发) |
| `task_generator` | `app/services/task_generator.py` | 根据 WBI/NCI/RS 指数生成召回/关系维护任务 | cron: `0 4 * * *`(每日凌晨 4 点) |
生成规则(四级漏斗):
- `max(WBI, NCI) > 7``high_priority_recall`
- `max(WBI, NCI) > 5``priority_recall`
- `1 < RS < 6``relationship_building`
- 不满足 → 不生成;`session_count > 0` 无任务时保底补充 `relationship_building`
回溯逻辑(已有):`recall_detector` 检测到新服务记录时,如匹配到活跃的 `follow_up_visit`,关闭旧任务(`superseded_by_new_visit`)并创建新回访。
### Fix-11recall_detector 增量时序 Bug + 编排顺序
**问题**`recall_detector``create_time > last_run_at` 全局增量指针。任务创建与客户到店时序不一致时漏匹配。
**示例时间线**
1. 4/5 08:00 — `recall_detector` 运行,`last_run_at = 08:00`
2. 4/5 14:00 — 客户张三到店,服务记录 `create_time = 14:00`
3. 4/6 02:00 — `task_generator` 为张三创建召回任务
4. 4/6 08:00 — `recall_detector` 再次运行,但 `last_run_at` 已更新,张三 4/5 的记录在上轮已扫描过(当时无任务)→ 永远不会重新匹配
**修复方案**
1. 废弃 `last_run_at` 增量指针,改为:对所有活跃任务,检查 `dwd_settlement_head` 中是否有 `pay_time > task.created_at` 的结算记录(`settle_type IN (1,3)`),匹配 `(site_id, assistant_id, member_id)`
2. 合并运行顺序ETL 完成后,统一编排器按顺序执行:
```
ETL 完成 → HTTP callback → 后端编排器:
Step 1: recall_detector.run() # 先检查完成(含回溯)
Step 2: task_generator.run() # 再生成新任务
```
3. 保留 `task_generator` 的每日 cron`0 4 * * *`)作为兜底
4. 两个服务保持独立文件,仅在调度层串联
**文件**`apps/backend/app/services/recall_detector.py`、调度编排器
### Fix-12ETL 完成后自动触发HTTP callback
**问题**`etl_data_updated` 事件无调用方recall_detector 仅靠手动触发。
**业务影响**ETL 每小时同步完新的飞球数据后,系统不会自动检查召回完成情况。客户回店后,助教看不到任务完成,必须有人手动在管理后台触发。
**修复方案**ETL `api_full` pipeline 完成后,通过 HTTP callback 通知后端。
实现:
1. 后端新增 API`POST /api/internal/etl-completed`(内部接口,仅限本机调用)
2. 该接口触发统一编排:`recall_detector.run()` → `task_generator.run()`
3. ETL orchestrator 在 pipeline 完成后调用此接口
4. 安全:校验来源 IP 或 shared secret
**文件**
- `apps/backend/app/routers/internal.py`(新增)
- `apps/etl/connectors/feiqiu/orchestration/` — pipeline 完成回调
- `apps/backend/app/services/trigger_scheduler.py` — 编排逻辑
### Fix-13任务完成统计三维度
**需求确认**:任务完成分三个统计维度:
| 统计维度 | 触发条件 | 是否需已有任务 | 判定数据源 |
|----------|----------|---------------|-----------|
| 召回完成(广义) | 关联客户来店 + 有结算单 | 不需要,只要是 `dws_member_assistant_relation_index` 中的关联客户 | `dwd_settlement_head``settle_type IN (1,3)` |
| 优先/高优先召回完成 | 已有 `priority_recall` / `high_priority_recall` 任务 + 客户到店 | 需要 | 同上 + `coach_tasks.status = 'completed'` |
| 回访完成 | 完成 `follow_up_visit` 任务(含回溯完成) | 需要 | `coach_tasks` + 回溯检测 |
关键定义:
- **"关联客户"**`dws_member_assistant_relation_index` 中有关系记录的客户
- **"来店"判定**`dwd_settlement_head` 中有结算单(`settle_type IN (1,3)`
- **时间窗口**:不限间隔,只要来了就算一次
- **统计时间范围**:全量(任务系统上线至今)
完成类型标记(两种都要记录):
| 完成类型 | 触发方式 | 当前状态 |
|----------|----------|----------|
| 自动完成 | 客户到店,系统匹配结算记录 | 逻辑存在但有 BugFix-11 + Fix-12 |
| 手动完成 | 助教在小程序中主动标记 | 当前不存在,需新建 |
手动完成需要:
- 小程序端:任务卡片增加"标记完成"按钮
- 后端 API`POST /api/xcx/tasks/{task_id}/complete`body: `{type: "manual", note?: string}`
- 数据库:`coach_tasks` 增加 `completion_type` 字段(`auto` / `manual`
- `coach_task_history` 增加 `action='manual_completed'`
### Fix-14助教看板 task 维度展示
**展示位置**:微信小程序 board-coach 页面 task_desc 排序维度
**统计口径**(按助教/按月):
- 召回完成数:`coach_task_history` 中 `action='completed'` 的任务数(优先+高优先)
- 回访完成数:`follow_up_visit` 类型的完成数(含自动+手动+回溯)
- 广义召回(关联客户来店):从 `dwd_settlement_head` + 关系表统计
**数据来源**`biz.coach_tasks` + `biz.coach_task_history` + ETL 关系/结算表
**时间范围**:全量
**后续**:更详细的趋势报表作为独立需求
### 实施依赖关系
```
Fix-11时序Bug+编排顺序)──┐
├→ Fix-14看板展示
Fix-12ETL 自动触发)────────┤
Fix-13手动完成+三维度统计)─┘
```
建议实施顺序Fix-11 → Fix-12 → Fix-13 → Fix-14

View File

@@ -0,0 +1,171 @@
# 财务看板优化 PRD
> 日期2026-04-08
> 触发:财务看板数据评估,发现数据失真、展示缺陷、指标缺失
---
## 一、Bug 修复
### 1.1 [P2] 会员卡余额快照不变动
**问题**`dws_finance_recharge_summary` 表中 `cash_card_balance`62,674`total_card_balance`67,760连续多日完全相同。当前实际余额约 118KETL 快照值 67K数字严重失真。
**根因**(已调研确认):
`finance_recharge_task.py` 中:
1. `_extract_card_balances()` 只调用一次(传入 `end_date`),所有日期复用同一快照(第 64、89 行)
2. SQL 查询只取 `scd2_is_current = 1``stat_date` 参数传入但未使用(第 197-216 行)
3. 赠送卡余额同样受影响
**DWD 维度表验证**(已确认可用):
- `dim_member_card_account` SCD2 版本充足(主卡类 55,526 个版本,覆盖 53 天)
- 按日有明确余额变化4/6: ¥3,745 → 4/7: ¥9,291 → 4/8: ¥1,587
- 可通过 `scd2_start_time <= stat_date AND scd2_end_time > stat_date` 做 as-of 查询
**修复方案**
- `_extract_card_balances()` 改为按日遍历,每天查该日生效的 SCD2 版本
- `transform()` 中按 `stat_date` 查表,不再共用同一快照
**状态**:待实施
### 1.2 [P2] 充值首充/续费笔数全为 0
**问题**`dws_finance_daily_summary``first_recharge_count``renewal_count` 全为 0`recharge_count` 有值。
**DWD 验证**(已确认源数据正确):
- `dwd_recharge_order.is_first` 字段完好
- 3 月:首充 6 笔 ¥14,996 / 续费 43 笔 ¥204,998
- ETL 聚合时未读取 `is_first` 字段进行分类统计
**修复方案**`finance_recharge_task.py` 中按 `is_first` 分组统计,写入 `first_recharge_count` / `renewal_count`
**状态**:待实施
---
## 二、文案优化
### 2.1 "储值卡结算冲销" → "储值卡消费抵扣"
**位置**
- `board_service.py:984` — 空降级值
- `board_service.py:1023` — 正常取值
- `fdw_queries.py:2898` — 查询层
**状态**:待实施
---
## 三、新增指标
### 3.1 经营一览 — 补充效率指标
在经营一览板块"实收流水"数据下方新增一行3 个指标横向排列:
```
┌──────────┬──────────┬──────────┐
│ 开台数 │ 客单价 │ 日均额 │
│ 3,262 │ ¥128.7 │ ¥13,991 │
└──────────┴──────────┴──────────┘
```
#### 计算方案
> 注意:客单价和日均额的分子使用**确认收入**(发生额-优惠后的实际入账),与所在位置"实收流水"语境一致。
| 指标 | 公式 | 数据来源 | 说明 |
|---|---|---|---|
| 开台数 | `SUM(order_count)` | `dws_finance_daily_summary.order_count` | 日期范围内结算单总数 |
| 客单价 | `SUM(confirmed_income) / SUM(order_count)` | 同上 | 确认收入 / 开台数 |
| 日均额 | `SUM(confirmed_income) / COUNT(DISTINCT stat_date)` | 同上 | 确认收入 / 营业天数 |
3 月实测值:开台 3,262 | 客单价 ¥128.7 | 日均额 ¥13,991
#### 展示方案
- 位置:经营一览板块"实收流水"数据下方
- 样式3 列等宽卡片,灰色标签 + 黑色数值
- 环比:支持(compare=1 时显示环比变化)
- 区域过滤:area!=all 时,从 `dws_finance_area_daily` 对应 area_code 取 `order_count` + `confirmed_income`
#### 实现影响
- **后端**`fdw_queries.get_finance_overview()` 追加返回 `order_count``board_service._build_overview()` 计算 `avg_per_order``avg_daily`
- **前端**`board-finance.wxml` 经营一览实收流水下方加一行三列
### 3.2 预收资产 — 补充充值笔数
在储值卡充值实收行追加笔数信息:
```
储值卡充值实收 ¥222,994 50笔
```
> 首充/续费区分待 Bug 1.2 修复后展开为 `50笔(首充6 / 续费44)`
#### 计算方案
| 指标 | 公式 | 数据来源 |
|---|---|---|
| 充值笔数 | `SUM(recharge_count)` | `dws_finance_recharge_summary.recharge_count` |
#### 展示方案
- 位置:储值卡充值实收金额右侧
- 格式:`50笔`
- 环比:暂不做
### 3.3 应计收入 — 优惠扣减增加占比
每个优惠项显示金额 + 占总优惠的百分比:
```
团购优惠 ¥48,231 23.6%
会员折扣 ¥89,102 43.6%
手动调整 ¥31,540 15.4%
赠送卡抵扣 ¥28,230 13.8%
其他优惠 ¥7,209 3.5%
────────────────────────
总优惠 ¥204,312 100%
```
#### 计算方案
| 指标 | 公式 |
|---|---|
| 各项占比 | `该项金额 / discount_total * 100` |
#### 展示方案
- 位置:每个优惠项金额右侧
- 格式:`23.6%`,灰色小字
- 仅在总优惠 > 0 时显示占比
### 3.4 团购展示优化
#### 背景
团购数据上传时已上传的是**结算金额**(扣除平台各种扣费后的实际回款),因此不需要额外展示手续费和面值拆分。
#### 方案
将现金流入板块中"团购平台"标签改为"团购结算",明确表达该金额是平台结算后到账金额,避免与核销面值混淆。
**位置**:后端 `board_service.py` / `fdw_queries.py` 中 cashflow 板块的团购项 label
**状态**:待实施
---
## 四、实施优先级
| 优先级 | 事项 | 工作量 |
|---|---|---|
| P1 | 文案优化(储值卡消费抵扣) | 小 |
| P1 | 经营一览补充效率指标(开台数/客单价/日均额) | 中 |
| P1 | 团购标签改为"团购结算" | 小 |
| P2 | 优惠占比展示 | 小 |
| P2 | 充值笔数展示 | 小 |
| P2 | 卡余额快照修复(Bug 1.1) | 中 |
| P2 | 充值首充/续费识别修复(Bug 1.2) | 小 |

12
start-admin.bat Normal file
View File

@@ -0,0 +1,12 @@
@echo off
chcp 65001 >nul 2>&1
:: CHANGE 2026-03-07 | Export NEOZQYY_LAUNCH_DIR (%~dp0 does not traverse junctions).
:: The ps1 script uses it to locate the project root, so PowerShell path
:: resolution does not end up on the D drive.
set "NEOZQYY_LAUNCH_DIR=%~dp0"
:: Launch the ps1 script: prefer pwsh 7, fall back to Windows PowerShell 5.1.
where pwsh >nul 2>&1
if errorlevel 1 (
    start "NeoZQYY Launcher" powershell -ExecutionPolicy Bypass -File "%~dp0scripts\ops\start-admin.ps1"
) else (
    start "NeoZQYY Launcher" pwsh -ExecutionPolicy Bypass -File "%~dp0scripts\ops\start-admin.ps1"
)

2
uv.lock generated
View File

@@ -584,6 +584,7 @@ name = "etl-feiqiu"
version = "0.1.0"
source = { virtual = "apps/etl/connectors/feiqiu" }
dependencies = [
    { name = "httpx" },
    { name = "neozqyy-shared" },
    { name = "openpyxl" },
    { name = "psycopg2-binary" },
@@ -595,6 +596,7 @@ dependencies = [
[package.metadata]
requires-dist = [
    { name = "httpx", specifier = ">=0.24.0" },
    { name = "neozqyy-shared", editable = "packages/shared" },
    { name = "openpyxl", specifier = ">=3.1.0" },
    { name = "psycopg2-binary", specifier = ">=2.9.0" },