feat: 2026-04-15~05-02 累积变更基线 — AI 重构 + Runtime Context + DWS 修复

涵盖(每条对应已存的审计记录):
- AI 模块拆分:apps/backend/app/ai/apps -> prompts/(8 个 APP + app2a 派生)
  audit: 2026-04-20__ai-module-complete.md
- admin-web AI 管理套件:AIDashboard / AIOperations / AIRunLogs / AITriggers / TriggerManager
  audit: 2026-04-21__admin-web-ai-management-suite.md
- App2 财务洞察 prompt v3 -> v5.1 + 小程序 AI 接入(chat / board-finance)
  audit: 2026-04-22__app2_prompt_v5_1_and_miniprogram_ai_insight.md
- App2 prewarm 全过滤器 + AI 触发器 cron reschedule
  audit: 2026-04-21__app2-finance-prewarm-all-filters.md
  migration: 20260420_ai_trigger_jobs_and_app2_prewarm.sql / 20260421_app2_prewarm_cron_reschedule.sql
- AppType 联合类型对齐 + adminAiAppTypes.test.ts
  audit: 2026-04-30__admin_web_ai_app_type_alignment.md
- DashScope tokens_used 提取修复
  audit: 2026-04-30__backend_dashscope_tokens_used_extraction.md
- App3 线索完整详情 prompt
  audit: 2026-05-01__backend_app3_full_detail_prompt.md
- Runtime Context 沙箱(5-1~5-2 主线):
  - 后端 schema/service + admin_runtime_context / xcx_runtime_clock 两个 router
  - admin-web RuntimeContext.tsx + miniprogram runtime-clock.ts
  - migration: 20260501__runtime_context_sandbox.sql
  - tools/db/verify_admin_web_sandbox.py + verify_sandbox_end_to_end.py
  - database/changes: 7 份 sandbox_* 验证报告
- 飞球 DWS 修复:finance_area_daily 区域汇总 + task_engine 调整
  + RLS 视图业务日上界(migration 20260502 + scripts/ops/gen_rls_business_date_migration.py)

合规:
- .gitignore 启用 tmp/ 排除
- 不入仓:apps/etl/connectors/feiqiu/.env(API_TOKEN secret,本地修改保留)

待验证清单:
- docs/audit/changes/2026-05-04__cumulative_baseline_pending_verification.md
  每个主题的功能完整性 / 上线验证几乎都未收口,按优先级 P0~P3 逐一处理
This commit is contained in:
Neo
2026-05-04 02:30:19 +08:00
parent 2010034840
commit caf179a5da
130 changed files with 14543 additions and 2717 deletions

View File

@@ -18,6 +18,12 @@ import logging
from datetime import datetime, timedelta, timezone
from app.database import get_connection
from app.services.runtime_context import (
LIVE_INSTANCE_ID,
MODE_LIVE,
MODE_SANDBOX,
get_runtime_context,
)
logger = logging.getLogger(__name__)
@@ -39,6 +45,14 @@ CACHE_MAX_PER_APP = 20_000
class AICacheService:
"""AI 缓存读写服务。"""
@staticmethod
def _runtime_scope(site_id: int, target_id: str, conn) -> tuple[str, str, str]:
    """Resolve the runtime scope for a cache operation.

    Returns a ``(runtime_mode, instance_id, effective_target_id)`` triple.
    In sandbox mode the target id is namespaced with the sandbox instance
    id so sandbox cache rows never collide with live-mode rows.
    """
    ctx = get_runtime_context(site_id, conn=conn)
    # Live mode (or a sandbox context without an instance id) keeps the
    # caller-supplied target_id untouched.
    if not (ctx.is_sandbox and ctx.sandbox_instance_id):
        return MODE_LIVE, LIVE_INSTANCE_ID, target_id
    return MODE_SANDBOX, ctx.sandbox_instance_id, f"{ctx.sandbox_instance_id}:{target_id}"
def get_latest(
self,
cache_type: str,
@@ -52,6 +66,9 @@ class AICacheService:
"""
conn = get_connection()
try:
runtime_mode, sandbox_instance_id, scoped_target_id = self._runtime_scope(
site_id, target_id, conn
)
with conn.cursor() as cur:
cur.execute(
"""
@@ -60,12 +77,14 @@ class AICacheService:
created_at, expires_at, status
FROM biz.ai_cache
WHERE cache_type = %s AND site_id = %s AND target_id = %s
AND COALESCE(runtime_mode, 'live') = %s
AND COALESCE(sandbox_instance_id, 'live') = %s
AND (status = 'valid' OR status IS NULL)
AND (expires_at IS NULL OR expires_at > now())
ORDER BY created_at DESC
LIMIT 1
""",
(cache_type, site_id, target_id),
(cache_type, site_id, scoped_target_id, runtime_mode, sandbox_instance_id),
)
columns = [desc[0] for desc in cur.description]
row = cur.fetchone()
@@ -88,6 +107,9 @@ class AICacheService:
"""
conn = get_connection()
try:
runtime_mode, sandbox_instance_id, scoped_target_id = self._runtime_scope(
site_id, target_id, conn
)
with conn.cursor() as cur:
cur.execute(
"""
@@ -96,10 +118,12 @@ class AICacheService:
created_at, expires_at
FROM biz.ai_cache
WHERE cache_type = %s AND site_id = %s AND target_id = %s
AND COALESCE(runtime_mode, 'live') = %s
AND COALESCE(sandbox_instance_id, 'live') = %s
ORDER BY created_at DESC
LIMIT %s
""",
(cache_type, site_id, target_id, limit),
(cache_type, site_id, scoped_target_id, runtime_mode, sandbox_instance_id, limit),
)
columns = [desc[0] for desc in cur.description]
rows = cur.fetchall()
@@ -128,23 +152,29 @@ class AICacheService:
conn = get_connection()
try:
runtime_mode, sandbox_instance_id, scoped_target_id = self._runtime_scope(
site_id, target_id, conn
)
with conn.cursor() as cur:
cur.execute(
"""
INSERT INTO biz.ai_cache
(cache_type, site_id, target_id, result_json,
triggered_by, score, expires_at, status)
VALUES (%s, %s, %s, %s, %s, %s, %s, 'valid')
triggered_by, score, expires_at, status,
runtime_mode, sandbox_instance_id)
VALUES (%s, %s, %s, %s, %s, %s, %s, 'valid', %s, %s)
RETURNING id
""",
(
cache_type,
site_id,
target_id,
scoped_target_id,
json.dumps(result_json, ensure_ascii=False),
triggered_by,
score,
expires_at,
runtime_mode,
sandbox_instance_id,
),
)
row = cur.fetchone()
@@ -158,7 +188,7 @@ class AICacheService:
# 写入成功后清理超限记录
try:
deleted = self._cleanup_excess(cache_type, site_id, target_id)
deleted = self._cleanup_excess(cache_type, site_id, scoped_target_id)
if deleted > 0:
logger.info(
"清理超限缓存: cache_type=%s site_id=%s target_id=%s 删除=%d",
@@ -183,15 +213,19 @@ class AICacheService:
"""写入 generating 状态占位记录,返回 id。完成后调用 finalize_cache 更新。"""
conn = get_connection()
try:
runtime_mode, sandbox_instance_id, scoped_target_id = self._runtime_scope(
site_id, target_id, conn
)
with conn.cursor() as cur:
cur.execute(
"""
INSERT INTO biz.ai_cache
(cache_type, site_id, target_id, result_json, status, triggered_by)
VALUES (%s, %s, %s, '{}', 'generating', %s)
(cache_type, site_id, target_id, result_json, status, triggered_by,
runtime_mode, sandbox_instance_id)
VALUES (%s, %s, %s, '{}', 'generating', %s, %s, %s)
RETURNING id
""",
(cache_type, site_id, target_id, triggered_by),
(cache_type, site_id, scoped_target_id, triggered_by, runtime_mode, sandbox_instance_id),
)
row = cur.fetchone()
conn.commit()