在前后端开发联调前的提交 20260223

This commit is contained in:
Neo
2026-02-23 23:02:20 +08:00
parent 254ccb1e77
commit fafc95e64c
1142 changed files with 10366960 additions and 36957 deletions

View File

@@ -32,13 +32,13 @@ def build_parser() -> argparse.ArgumentParser:
"--date-from",
type=str,
default=None,
help="数据获取起始日期 (YYYY-MM-DD)",
help="数据获取起始日期 (YYYY-MM-DD),默认 30 天前",
)
parser.add_argument(
"--date-to",
type=str,
default=None,
help="数据获取截止日期 (YYYY-MM-DD)",
help="数据获取截止日期 (YYYY-MM-DD),默认今天",
)
parser.add_argument(
"--limit",
@@ -58,17 +58,11 @@ def build_parser() -> argparse.ArgumentParser:
def resolve_output_dir() -> Path:
"""
确定输出目录:
1. 优先读取环境变量 SYSTEM_ANALYZE_ROOT
2. 回退到 docs/reports/
3. 确保目录存在(自动创建)
1. 从 .env 读取 SYSTEM_ANALYZE_ROOT
2. 确保目录存在(自动创建)
"""
env_root = os.environ.get("SYSTEM_ANALYZE_ROOT")
if env_root:
out = Path(env_root)
else:
out = Path("docs/reports")
out.mkdir(parents=True, exist_ok=True)
return out
from _env_paths import get_output_path
return get_output_path("SYSTEM_ANALYZE_ROOT")
def generate_output_filename(dt: "datetime") -> str:
@@ -86,48 +80,108 @@ def main() -> None:
5. 调用 dump_collection_results() 落盘
6. 输出采集摘要到 stdout
"""
from datetime import date as _date, datetime as _datetime
from dotenv import load_dotenv
from datetime import date as _date, datetime as _datetime, timedelta as _timedelta
# ── 1. 解析 CLI 参数 ──
parser = build_parser()
args = parser.parse_args()
# ── 2. 加载环境变量(分层叠加:根 .env < ETL .env < 环境变量) ──
# override=False 保证后加载的不覆盖先加载的环境变量
# 加载根 .env(最低优先级)
load_dotenv(Path(".env"), override=False)
# 再加载 ETL 专属 .env(中优先级)
load_dotenv(Path("apps/etl/connectors/feiqiu/.env"), override=False)
# 真实环境变量(最高优先级)已自动存在于 os.environ
# ── 2. 加载环境变量 ──
# _env_paths 在 import 时已通过 Path(__file__).parents[2] / ".env" 绝对路径
# 加载根 .env,无需再用相对路径 load_dotenv避免 cwd 不在项目根时失效
output_dir = resolve_output_dir() # 触发 _env_paths import → 加载根 .env
# ── 3. 构造 AnalyzerConfig ──
date_from = _date.fromisoformat(args.date_from) if args.date_from else None
date_to = _date.fromisoformat(args.date_to) if args.date_to else None
tables = [t.strip() for t in args.tables.split(",")] if args.tables else None
output_dir = resolve_output_dir()
# ── 3. 构造基础参数 ──
date_to = _date.fromisoformat(args.date_to) if args.date_to else _date.today()
user_date_from = _date.fromisoformat(args.date_from) if args.date_from else None
target_limit = args.limit
tables_filter = [t.strip() for t in args.tables.split(",")] if args.tables else None
# CHANGE 2026-02-21 | 遵循 testing-env.md优先使用测试库 TEST_DB_DSN
pg_dsn = os.environ.get("TEST_DB_DSN") or os.environ.get("PG_DSN", "")
if not pg_dsn:
raise RuntimeError("TEST_DB_DSN 和 PG_DSN 均未定义,请检查根 .env 配置")
from dataflow_analyzer import AnalyzerConfig, ODS_SPECS, collect_all_tables, dump_collection_results
config = AnalyzerConfig(
date_from=date_from,
# CHANGE 2026-02-21 | API 凭证缺失时提前报错,避免静默产出空报告
api_base = os.environ.get("API_BASE", "")
api_token = os.environ.get("API_TOKEN", "")
store_id = os.environ.get("STORE_ID", "")
missing = [k for k, v in [("API_BASE", api_base), ("API_TOKEN", api_token), ("STORE_ID", store_id)] if not v]
if missing:
raise RuntimeError(
f"API 凭证缺失:{', '.join(missing)},"
f"请在根 .env 中配置,参考 .env.template"
)
base_kwargs = dict(
date_to=date_to,
limit=args.limit,
tables=tables,
limit=target_limit,
output_dir=output_dir,
pg_dsn=os.environ.get("DATABASE_URL") or os.environ.get("PG_DSN", ""),
api_base=os.environ.get("API_BASE", ""),
api_token=os.environ.get("API_TOKEN", ""),
store_id=os.environ.get("STORE_ID", ""),
pg_dsn=pg_dsn,
api_base=api_base,
api_token=api_token,
store_id=store_id,
)
# ── 4. 执行采集(使用本模块的 ODS_SPECS ──
# ── 4. 逐表自适应日期扩展采集 ──
# CHANGE 2026-02-21 | 策略10天 → 30天 → 90天3 个档位
expand_days = [10, 30, 90]
if user_date_from:
# 用户显式指定了 date_from不做自适应扩展
expand_days = []
initial_date_from = user_date_from
else:
initial_date_from = date_to - _timedelta(days=expand_days[0])
# 首轮采集
config = AnalyzerConfig(date_from=initial_date_from, tables=tables_filter, **base_kwargs)
results = collect_all_tables(config, specs=ODS_SPECS)
actual_date_from = initial_date_from
# 自适应扩展:对不满 target_limit 的表逐步扩大日期范围
# CHANGE 2026-02-21 | 维表time_fields=None不参与时间扩展其 API 不接受日期范围
_dim_tables = {s["table"] for s in ODS_SPECS if s.get("time_fields") is None}
if not user_date_from:
for days in expand_days[1:]:
short_tables = [r.table_name for r in results
if r.error is None
and r.record_count < target_limit
and r.table_name not in _dim_tables]
if not short_tables:
break # 所有表都满足了
wider_from = date_to - _timedelta(days=days)
print(f" [自适应扩展] {len(short_tables)} 张表不足 {target_limit} 条,扩展至 {wider_from} ~ {date_to}")
wider_config = AnalyzerConfig(
date_from=wider_from, tables=short_tables, **base_kwargs)
wider_results = collect_all_tables(wider_config, specs=ODS_SPECS)
# 用更宽范围的结果替换不满的表(仅当新结果记录数更多时)
wider_map = {r.table_name: r for r in wider_results}
for idx, r in enumerate(results):
if r.table_name in wider_map:
new_r = wider_map[r.table_name]
if new_r.record_count > r.record_count:
results[idx] = new_r
actual_date_from = wider_from
# ── 5. 落盘 ──
paths = dump_collection_results(results, output_dir)
# ── 5.1 将实际使用的 date_from/date_to 追加写入 manifest ──
import json as _json
manifest_path = output_dir / "collection_manifest.json"
if manifest_path.exists():
with open(manifest_path, "r", encoding="utf-8") as _f:
manifest_data = _json.load(_f)
manifest_data["date_from"] = str(actual_date_from)
manifest_data["date_to"] = str(date_to)
with open(manifest_path, "w", encoding="utf-8") as _f:
_json.dump(manifest_data, _f, ensure_ascii=False, indent=2)
# ── 6. 输出采集摘要 ──
now = _datetime.now()
filename = generate_output_filename(now)