feat: 2026-04-15~05-02 累积变更基线 — AI 重构 + Runtime Context + DWS 修复
涵盖(每条对应已存的审计记录): - AI 模块拆分:apps/backend/app/ai/apps -> prompts/(8 个 APP + app2a 派生) audit: 2026-04-20__ai-module-complete.md - admin-web AI 管理套件:AIDashboard / AIOperations / AIRunLogs / AITriggers / TriggerManager audit: 2026-04-21__admin-web-ai-management-suite.md - App2 财务洞察 prompt v3 -> v5.1 + 小程序 AI 接入(chat / board-finance) audit: 2026-04-22__app2_prompt_v5_1_and_miniprogram_ai_insight.md - App2 prewarm 全过滤器 + AI 触发器 cron reschedule audit: 2026-04-21__app2-finance-prewarm-all-filters.md migration: 20260420_ai_trigger_jobs_and_app2_prewarm.sql / 20260421_app2_prewarm_cron_reschedule.sql - AppType 联合类型对齐 + adminAiAppTypes.test.ts audit: 2026-04-30__admin_web_ai_app_type_alignment.md - DashScope tokens_used 提取修复 audit: 2026-04-30__backend_dashscope_tokens_used_extraction.md - App3 线索完整详情 prompt audit: 2026-05-01__backend_app3_full_detail_prompt.md - Runtime Context 沙箱(5-1~5-2 主线): - 后端 schema/service + admin_runtime_context / xcx_runtime_clock 两个 router - admin-web RuntimeContext.tsx + miniprogram runtime-clock.ts - migration: 20260501__runtime_context_sandbox.sql - tools/db/verify_admin_web_sandbox.py + verify_sandbox_end_to_end.py - database/changes: 7 份 sandbox_* 验证报告 - 飞球 DWS 修复:finance_area_daily 区域汇总 + task_engine 调整 + RLS 视图业务日上界(migration 20260502 + scripts/ops/gen_rls_business_date_migration.py) 合规: - .gitignore 启用 tmp/ 排除 - 不入仓:apps/etl/connectors/feiqiu/.env(API_TOKEN secret,本地修改保留) 待验证清单: - docs/audit/changes/2026-05-04__cumulative_baseline_pending_verification.md 每个主题的功能完整性 / 上线验证几乎都未收口,按优先级 P0~P3 逐一处理
This commit is contained in:
@@ -35,9 +35,21 @@ class AdminAIService:
|
||||
|
||||
# ── Dashboard ─────────────────────────────────────────
|
||||
|
||||
async def get_dashboard(self, site_id: int | None = None) -> dict:
|
||||
"""聚合所有 Dashboard 数据。"""
|
||||
today_stats = await self._get_today_stats(site_id)
|
||||
async def get_dashboard(
|
||||
self,
|
||||
site_id: int | None = None,
|
||||
range_days: int | None = None,
|
||||
date_from: str | None = None,
|
||||
date_to: str | None = None,
|
||||
) -> dict:
|
||||
"""聚合所有 Dashboard 数据。
|
||||
|
||||
时间范围优先级:
|
||||
1. 若 date_from / date_to 同时给出(指定日期)→ 闭区间 [from, to]
|
||||
2. 若 range_days=N → [CURRENT_DATE - (N-1) days, 现在]
|
||||
3. 默认 range_days=1(今日)
|
||||
"""
|
||||
today_stats = await self._get_range_stats(site_id, range_days, date_from, date_to)
|
||||
trend_7d = await self._get_7d_trend(site_id)
|
||||
app_dist = await self._get_app_distribution(site_id)
|
||||
app_health = await self._get_app_health(site_id)
|
||||
@@ -52,9 +64,32 @@ class AdminAIService:
|
||||
"app_health": app_health,
|
||||
}
|
||||
|
||||
async def _get_today_stats(self, site_id: int | None) -> dict:
|
||||
"""今日调用次数、成功率、Token 消耗、平均延迟。"""
|
||||
site_clause, params = _site_filter(site_id)
|
||||
async def _get_range_stats(
|
||||
self,
|
||||
site_id: int | None,
|
||||
range_days: int | None,
|
||||
date_from: str | None,
|
||||
date_to: str | None,
|
||||
) -> dict:
|
||||
"""指定时间段内的调用次数、成功率、Token 消耗、平均延迟。
|
||||
|
||||
字段名沿用 today_* 前缀以兼容前端 DashboardResponse schema。
|
||||
"""
|
||||
site_clause, site_params = _site_filter(site_id)
|
||||
|
||||
if date_from and date_to:
|
||||
time_clause = "created_at >= %s::date AND created_at < (%s::date + INTERVAL '1 day')"
|
||||
time_params: tuple = (date_from, date_to)
|
||||
else:
|
||||
days = range_days if range_days and range_days > 0 else 1
|
||||
time_clause = (
|
||||
"created_at >= CURRENT_DATE - (%s::int - 1) * INTERVAL '1 day' "
|
||||
"AND created_at < CURRENT_DATE + INTERVAL '1 day'"
|
||||
)
|
||||
time_params = (days,)
|
||||
|
||||
params = time_params + site_params
|
||||
|
||||
conn = get_connection()
|
||||
try:
|
||||
with conn.cursor() as cur:
|
||||
@@ -67,8 +102,7 @@ class AdminAIService:
|
||||
COALESCE(AVG(latency_ms) FILTER (WHERE latency_ms IS NOT NULL), 0)
|
||||
AS avg_latency
|
||||
FROM biz.ai_run_logs
|
||||
WHERE created_at >= CURRENT_DATE
|
||||
AND created_at < CURRENT_DATE + INTERVAL '1 day'
|
||||
WHERE {time_clause}
|
||||
{site_clause}
|
||||
""",
|
||||
params,
|
||||
@@ -466,6 +500,22 @@ class AdminAIService:
|
||||
finally:
|
||||
conn.close()
|
||||
|
||||
# Phase 1.4:广播 cache_invalidated 事件,admin-web / 小程序可实时刷新
|
||||
if affected > 0:
|
||||
try:
|
||||
from app.ai.event_bus import AIEvent, get_event_bus
|
||||
get_event_bus().publish(AIEvent(
|
||||
type="cache_invalidated",
|
||||
site_id=site_id,
|
||||
payload={
|
||||
"cache_type": app_type,
|
||||
"member_id": member_id,
|
||||
"affected": affected,
|
||||
},
|
||||
))
|
||||
except Exception:
|
||||
logger.debug("cache_invalidated 事件广播失败", exc_info=True)
|
||||
|
||||
return affected
|
||||
|
||||
# ── Token 预算 ────────────────────────────────────────
|
||||
@@ -699,6 +749,140 @@ class AdminAIService:
|
||||
|
||||
return "ignored"
|
||||
|
||||
# ── 触发器管理(biz.trigger_jobs)───────────────────────
|
||||
|
||||
async def list_triggers(self) -> list[dict]:
    """List every AI-related trigger (``job_type`` prefixed ``ai_`` plus ``task_generator``).

    Returned fields: id / job_name / job_type / trigger_condition /
    trigger_config / status / description / last_run_at / next_run_at / last_error
    """
    query = """
        SELECT id, job_name, job_type, trigger_condition,
               trigger_config, status, description,
               last_run_at, next_run_at, last_error
        FROM biz.trigger_jobs
        WHERE job_type LIKE 'ai_%' OR job_name = 'task_generator'
        ORDER BY trigger_condition DESC, job_name
    """
    connection = get_connection()
    try:
        with connection.cursor() as cursor:
            cursor.execute(query)
            column_names = [descriptor[0] for descriptor in cursor.description]
            records = cursor.fetchall()
            connection.commit()
    finally:
        connection.close()
    return [_row_to_dict(column_names, record) for record in records]
|
||||
|
||||
async def update_trigger(
    self, trigger_id: int,
    status_new: str | None = None,
    cron_expression: str | None = None,
    description: str | None = None,
) -> dict:
    """Update a trigger: enable/disable it, change its cron, or edit its description.

    Only triggers whose ``job_type`` starts with ``ai_`` (or the
    ``task_generator`` job) may be modified.
    """
    if status_new is not None and status_new not in ("enabled", "disabled"):
        raise ValueError(f"非法 status: {status_new}")

    # Collect SET clauses in a fixed order; clauses are whitelisted literals,
    # so interpolating them into the statement below is injection-safe.
    candidate_updates: tuple[tuple[str, object], ...] = (
        ("status = %s", status_new),
        ("trigger_config = jsonb_set(trigger_config, '{cron_expression}', to_jsonb(%s::text))", cron_expression),
        ("description = %s", description),
    )
    assignments: list[str] = []
    values: list = []
    for clause, value in candidate_updates:
        if value is not None:
            assignments.append(clause)
            values.append(value)

    if not assignments:
        raise ValueError("至少修改一个字段")

    values.append(trigger_id)
    connection = get_connection()
    try:
        with connection.cursor() as cursor:
            cursor.execute(
                f"""
                UPDATE biz.trigger_jobs
                SET {", ".join(assignments)}
                WHERE id = %s
                  AND (job_type LIKE 'ai_%%' OR job_name = 'task_generator')
                RETURNING id, job_name, job_type, trigger_condition,
                          trigger_config, status, description,
                          last_run_at, next_run_at, last_error
                """,
                values,
            )
            record = cursor.fetchone()
            if record is None:
                # No matching (and mutable) trigger — undo and refuse.
                connection.rollback()
                raise ValueError("触发器不存在或不可修改")
            column_names = [descriptor[0] for descriptor in cursor.description]
            connection.commit()
    except Exception:
        connection.rollback()
        raise
    finally:
        connection.close()
    return _row_to_dict(column_names, record)
|
||||
|
||||
# ── 预热进度(app2_finance 72 组合)──────────────────────
|
||||
|
||||
async def get_prewarm_progress(self, site_id: int) -> dict:
    """Report cache progress for the 72 app2_finance combinations.

    Returns: total=72, done=N, missing=[{time_dimension, area}], last_updated.
    """
    dimensions = (
        "this_month", "last_month", "this_week", "last_week",
        "this_quarter", "last_quarter", "last_3_months", "last_6_months",
    )
    zones = (
        "all", "hall", "hallA", "hallB", "hallC",
        "vip", "snooker", "mahjong", "ktv",
    )
    # Every expected target_id is "<time_dimension>__<area>" (8 x 9 = 72).
    expected_ids = {f"{dim}__{zone}" for dim in dimensions for zone in zones}

    connection = get_connection()
    try:
        with connection.cursor() as cursor:
            cursor.execute(
                """
                SELECT target_id, max(created_at) AS last_updated
                FROM biz.ai_cache
                WHERE cache_type = 'app2_finance'
                  AND site_id = %s
                  AND target_id LIKE %s ESCAPE '\\'
                GROUP BY target_id
                """,
                (site_id, r'%\_\_%'),
            )
            records = cursor.fetchall()
            connection.commit()
    finally:
        connection.close()

    cached = dict(records)  # target_id -> last cache timestamp
    latest = max(cached.values(), default=None)

    missing_entries = []
    for key in sorted(expected_ids - cached.keys()):
        dim, zone = key.split("__", 1)
        missing_entries.append({"target_id": key, "time_dimension": dim, "area": zone})

    return {
        "total": len(expected_ids),
        "done": len(expected_ids & cached.keys()),
        "missing": missing_entries,
        "last_updated": latest.isoformat() if latest else None,
    }
|
||||
|
||||
|
||||
# ── 工具函数 ──────────────────────────────────────────────
|
||||
|
||||
|
||||
Reference in New Issue
Block a user