# -*- coding: utf-8 -*-
"""Flow 运行器:Flow 定义、层→任务映射、校验编排。

从原 ETLScheduler 中提取 Flow 编排逻辑,委托 TaskExecutor 执行具体任务。
所有依赖通过构造函数注入,不自行创建资源。

术语说明:代码中保留 pipeline 参数名以兼容调用方,概念上统一使用 Flow。
"""
from __future__ import annotations

import logging
import uuid
from datetime import datetime, timedelta
from typing import Any, Dict, List, Optional
from zoneinfo import ZoneInfo

from orchestration.topological_sort import topological_sort
from tasks.verification import filter_verify_tables


class FlowRunner:
    """Flow orchestrator: runs the ETL layers of a Flow and, depending on the
    processing mode, a post-run verify-and-backfill pass.

    Extracted from the original ETLScheduler. Task execution is delegated to
    the injected TaskExecutor; all collaborators are constructor-injected and
    no resources are created here.

    Naming note: the ``pipeline`` parameter name is kept for backward
    compatibility with callers; conceptually these are "Flows".
    """

    # Flow definitions: the layers each Flow runs, in order
    # (migrated here from module-level constants in scheduler.py).
    FLOW_LAYERS: dict[str, list[str]] = {
        "api_ods": ["ODS"],
        "api_ods_dwd": ["ODS", "DWD"],
        "api_full": ["ODS", "DWD", "DWS", "INDEX"],
        "ods_dwd": ["DWD"],
        "dwd_dws": ["DWS"],
        "dwd_dws_index": ["DWS", "INDEX"],
        "dwd_index": ["INDEX"],
    }

    # Layer name -> config key holding an explicit task-list override.
    # Hoisted to class level so the mapping is not rebuilt on every
    # _resolve_tasks() call.
    _LAYER_CONFIG_KEYS: dict[str, str] = {
        "ODS": "run.ods_tasks",
        "DWD": "run.dwd_tasks",
        "DWS": "run.dws_tasks",
        "INDEX": "run.index_tasks",
    }

    def __init__(
        self,
        config,
        task_executor,
        task_registry,
        db_conn,
        api_client,
        logger: logging.Logger,
    ):
        """Store injected collaborators.

        Args:
            config: Config object exposing ``get(key, default)``.
            task_executor: Executes lists of task codes via ``run_tasks``.
            task_registry: Task lookup (``get_tasks_by_layer``; also passed
                to ``topological_sort``).
            db_conn: Database connection handed to layer verifiers.
            api_client: API client handed to ODS verifiers.
            logger: Destination for all orchestration logging.
        """
        self.config = config
        self.task_executor = task_executor
        self.task_registry = task_registry
        self.db_conn = db_conn
        self.api_client = api_client
        self.logger = logger
        self.tz = ZoneInfo(config.get("app.timezone", "Asia/Shanghai"))

    def run(
        self,
        pipeline: str | None = None,
        layers: list[str] | None = None,
        processing_mode: str = "increment_only",
        data_source: str = "hybrid",
        window_start: datetime | None = None,
        window_end: datetime | None = None,
        window_split: str | None = None,
        task_codes: list[str] | None = None,
        fetch_before_verify: bool = False,
        verify_tables: list[str] | None = None,
    ) -> dict[str, Any]:
        """Execute a Flow and return an aggregate result.

        Args:
            pipeline: Flow name (api_ods, api_ods_dwd, api_full, ...);
                mutually exclusive with ``layers``. Parameter name kept for
                caller compatibility.
            layers: Explicit layer list (e.g. ``["ODS", "DWD"]``); mutually
                exclusive with ``pipeline``.
            processing_mode: increment_only / verify_only / increment_verify.
            data_source: online / offline / hybrid.
            window_start: Window start; defaults to 24h before ``window_end``.
            window_end: Window end; defaults to "now" in the configured tz.
            window_split: Window split unit (none / day / week / month).
            task_codes: Explicit task codes to run (filter within the Flow).
            fetch_before_verify: In verify_only mode, fetch API data first.
            verify_tables: Restrict verification to these tables.

        Returns:
            Dict with status / pipeline / layers / results /
            verification_summary.

        Raises:
            ValueError: If neither ``pipeline`` nor ``layers`` is given, or
                the Flow name is unknown.
        """
        from utils.task_logger import TaskLogger

        # Resolve the layer list: Flow-name lookup OR explicit layers.
        # (Kept in a separate name; the original rebound the `layers`
        # parameter, shadowing the caller's argument.)
        if pipeline is not None:
            if pipeline not in self.FLOW_LAYERS:
                raise ValueError(f"无效的 Flow 名称: {pipeline}")
            resolved_layers = self.FLOW_LAYERS[pipeline]
            run_label = pipeline
        elif layers is not None:
            resolved_layers = layers
            run_label = f"layers({','.join(layers)})"
        else:
            raise ValueError("必须指定 flow 名称或 layers 参数之一")

        # NOTE: the original also generated an unused uuid4 run id here;
        # removed as dead code.
        flow_logger = TaskLogger(f"FLOW_{run_label.upper()}", self.logger)
        flow_logger.start(f"开始执行 Flow: {run_label}")

        results: list[dict[str, Any]] = []
        verification_summary: dict[str, Any] | None = None
        ods_dump_dirs: dict[str, str] = {}
        use_local_json = bool(self.config.get("verification.ods_use_local_json", False))

        # Default window: the 24 hours ending now (configured timezone).
        if window_end is None:
            window_end = datetime.now(self.tz)
        if window_start is None:
            window_start = window_end - timedelta(hours=24)

        try:
            if processing_mode == "verify_only":
                # Verify-only mode, optionally fetching fresh API data first.
                if fetch_before_verify:
                    self.logger.info("Flow %s: 校验模式(先获取 API 数据)", run_label)

                    if task_codes:
                        # Only ODS_* tasks fetch from the API.
                        ods_tasks = [t for t in task_codes if t.startswith("ODS_")]
                        if ods_tasks:
                            self.logger.info("从 API 获取数据: %s", ods_tasks)
                            results = self.task_executor.run_tasks(ods_tasks, data_source=data_source)
                    else:
                        auto_tasks = self._resolve_tasks(["ODS"])
                        if auto_tasks:
                            self.logger.info("从 API 获取数据: %s", auto_tasks)
                            results = self.task_executor.run_tasks(auto_tasks, data_source=data_source)

                    # Remember per-task dump directories so ODS verification
                    # can run against the local JSON dumps.
                    ods_dump_dirs = {
                        r.get("task_code"): r.get("dump_dir")
                        for r in results
                        if r.get("task_code") and r.get("dump_dir")
                    }
                    self.logger.info("API 数据获取完成,开始校验并修复")
                else:
                    self.logger.info("Flow %s: 仅校验模式,跳过增量 ETL,直接执行校验并修复", run_label)

                verification_summary = self._run_verification(
                    layers=resolved_layers,
                    window_start=window_start,
                    window_end=window_end,
                    window_split=window_split,
                    fetch_from_api=fetch_before_verify,
                    ods_dump_dirs=ods_dump_dirs,
                    use_local_json=use_local_json,
                    verify_tables=verify_tables,
                )
                flow_logger.set_verification_result(verification_summary)
            else:
                # Incremental ETL (increment_only or increment_verify).
                self.logger.info("Flow %s: 执行增量 ETL,层=%s", run_label, resolved_layers)

                if task_codes:
                    results = self.task_executor.run_tasks(task_codes, data_source=data_source)
                else:
                    auto_tasks = self._resolve_tasks(resolved_layers)
                    results = self.task_executor.run_tasks(auto_tasks, data_source=data_source)

                # increment_verify: verify-and-backfill after the increment.
                if processing_mode == "increment_verify":
                    self.logger.info("Flow %s: 开始校验并修复", run_label)
                    verification_summary = self._run_verification(
                        layers=resolved_layers,
                        window_start=window_start,
                        window_end=window_end,
                        window_split=window_split,
                        ods_dump_dirs=ods_dump_dirs,
                        use_local_json=use_local_json,
                        verify_tables=verify_tables,
                    )
                    flow_logger.set_verification_result(verification_summary)

            # Aggregate counters across task results in a single pass.
            totals = {"fetched": 0, "inserted": 0, "updated": 0, "errors": 0}
            for r in results:
                counts = r.get("counts", {})
                for key in totals:
                    totals[key] += counts.get(key, 0)
            flow_logger.set_counts(**totals)

            summary_text = flow_logger.end(status="成功")
            self.logger.info("\n%s", summary_text)

            return {
                "status": "SUCCESS",
                "pipeline": run_label,
                "layers": resolved_layers,
                "results": results,
                "verification_summary": verification_summary,
            }

        except Exception as exc:
            # Log the failure summary, then let the caller see the exception.
            summary_text = flow_logger.end(status="失败", error_message=str(exc))
            self.logger.error("\n%s", summary_text)
            raise

    def _resolve_tasks(self, layers: list[str]) -> list[str]:
        """Resolve layer names into an ordered list of task codes.

        Priority per layer: config override > TaskRegistry.get_tasks_by_layer()
        > empty list (warning logged). The collected codes are then
        topologically sorted so dependents run after their dependencies.
        (Hard-coded fallback lists were removed; everything goes through the
        registry — requirements 7.1-7.3, 8.3-8.5.)
        """
        tasks: list[str] = []

        for layer in layers:
            layer_upper = layer.upper()

            # 1. A non-empty config value wins outright.
            config_key = self._LAYER_CONFIG_KEYS.get(layer_upper)
            if config_key:
                config_tasks = self.config.get(config_key, [])
                if config_tasks:
                    tasks.extend(config_tasks)
                    continue

            # 2. Fall back to the registry.
            registry_tasks = self.task_registry.get_tasks_by_layer(layer_upper)
            if registry_tasks:
                tasks.extend(registry_tasks)
            else:
                # 3. Nothing registered and no override: warn and skip.
                self.logger.warning(
                    "层 %s 在 Registry 中无已注册任务且无配置覆盖,跳过", layer_upper
                )

        # Dependency ordering: dependents after their dependencies.
        if tasks:
            tasks = topological_sort(tasks, self.task_registry)

        return tasks

    def _run_verification(
        self,
        layers: list[str],
        window_start: datetime,
        window_end: datetime,
        window_split: str | None = None,
        fetch_from_api: bool = False,
        ods_dump_dirs: dict[str, str] | None = None,
        use_local_json: bool = False,
        verify_tables: list[str] | None = None,
    ) -> dict[str, Any]:
        """Run verify-and-backfill for each layer (migrated from the old
        _run_layer_verification).

        Per-layer failures are caught, logged, and recorded as ERROR entries
        so one bad layer does not abort the others.

        Returns:
            Aggregate dict: status, total/consistent/error table counts,
            total backfilled rows, and per-layer results.
        """
        try:
            from tasks.verification import get_verifier_for_layer, build_window_segments
        except ImportError:
            # Verification framework is optional; degrade gracefully.
            self.logger.warning("校验框架未安装,跳过后置校验")
            return {"status": "SKIPPED", "message": "校验框架未安装"}

        total_tables = 0
        consistent_tables = 0
        total_backfilled = 0
        total_error_tables = 0
        layer_results: dict[str, Any] = {}
        skip_ods_on_fetch = bool(
            self.config.get("verification.skip_ods_when_fetch_before_verify", True)
        )
        ods_dump_dirs = ods_dump_dirs or {}

        # NOTE(review): the original bound this result to an unused variable.
        # The call is kept only in case it validates window_split as a side
        # effect — confirm and drop if it is pure.
        build_window_segments(window_start, window_end, window_split)

        for layer in layers:
            layer_upper = layer.upper()
            try:
                # ODS already landed during fetch_before_verify; a second
                # verification pass would be redundant (configurable).
                if layer_upper == "ODS" and fetch_from_api and skip_ods_on_fetch:
                    self.logger.info("ODS 层在 fetch_before_verify 下已完成入库,跳过二次校验")
                    layer_results[layer] = {
                        "status": "SKIPPED",
                        "reason": "fetch_before_verify",
                    }
                    continue

                # DWS/INDEX have no lightweight verifier yet; skipping the
                # integrity check is the safe choice (requirement 6.5).
                if layer_upper in ("DWS", "INDEX"):
                    self.logger.info(
                        "DWS/INDEX 层使用轻量级校验,跳过完整性检查: %s", layer
                    )
                    layer_results[layer] = {
                        "status": "SKIPPED",
                        "reason": "lightweight_dws_index",
                    }
                    continue

                if layer_upper == "ODS" and fetch_from_api:
                    if use_local_json:
                        # Verify against local JSON dumps instead of the API.
                        if not ods_dump_dirs:
                            self.logger.warning("ODS 校验配置为使用本地 JSON,但未找到 dump 目录,跳过 ODS 校验")
                            layer_results[layer] = {
                                "status": "SKIPPED",
                                "reason": "local_json_missing",
                            }
                            continue
                        verifier = get_verifier_for_layer(
                            layer,
                            self.db_conn,
                            self.logger,
                            api_client=self.api_client,
                            fetch_from_api=True,
                            local_dump_dirs=ods_dump_dirs,
                            use_local_json=True,
                        )
                        self.logger.info("ODS 层使用本地 JSON 校验(不请求 API)")
                    else:
                        verifier = get_verifier_for_layer(
                            layer,
                            self.db_conn,
                            self.logger,
                            api_client=self.api_client,
                            fetch_from_api=True,
                        )
                        self.logger.info("ODS 层启用 API 数据校验")
                else:
                    verifier_kwargs: dict[str, Any] = {}
                    # NOTE(review): this INDEX branch is currently unreachable
                    # because the DWS/INDEX skip above always `continue`s;
                    # kept for when INDEX verification is re-enabled.
                    if layer_upper == "INDEX":
                        try:
                            lookback_days = int(self.config.get("run.index_lookback_days", 60))
                        except (TypeError, ValueError):
                            lookback_days = 60
                        verifier_kwargs = {
                            "lookback_days": lookback_days,
                            "config": self.config,
                        }
                        self.logger.info("INDEX 层校验使用回溯天数: %s", lookback_days)
                    if layer_upper == "DWD":
                        verifier_kwargs["config"] = self.config
                    verifier = get_verifier_for_layer(
                        layer,
                        self.db_conn,
                        self.logger,
                        **verifier_kwargs,
                    )

                # filter_verify_tables replaces the old inline static method.
                layer_tables = filter_verify_tables(layer, verify_tables)
                if verify_tables and not layer_tables:
                    self.logger.info("层 %s 无匹配表,跳过校验", layer)
                    layer_results[layer] = {
                        "status": "SKIPPED",
                        "reason": "table_filter",
                    }
                    continue

                self.logger.info("开始校验层: %s,时间窗口: %s ~ %s", layer, window_start, window_end)

                layer_summary = verifier.verify_and_backfill(
                    window_start=window_start,
                    window_end=window_end,
                    auto_backfill=True,
                    split_unit=window_split or "month",
                    tables=layer_tables,
                )

                # Summaries are duck-typed; tolerate missing attributes.
                layer_results[layer] = (
                    layer_summary.to_dict() if hasattr(layer_summary, 'to_dict') else {}
                )

                if hasattr(layer_summary, 'total_tables'):
                    total_tables += layer_summary.total_tables
                    consistent_tables += layer_summary.consistent_tables
                    total_backfilled += layer_summary.total_backfilled
                    total_error_tables += getattr(layer_summary, 'error_tables', 0)

                self.logger.info(
                    "层 %s 校验完成: 表数=%d, 一致=%d, 错误=%d, 补齐=%d",
                    layer,
                    getattr(layer_summary, 'total_tables', 0),
                    getattr(layer_summary, 'consistent_tables', 0),
                    getattr(layer_summary, 'error_tables', 0),
                    getattr(layer_summary, 'total_backfilled', 0),
                )

            except Exception as exc:
                # Record the failure and keep verifying remaining layers.
                self.logger.error("层 %s 校验失败: %s", layer, exc, exc_info=True)
                layer_results[layer] = {"status": "ERROR", "error": str(exc)}

        return {
            "status": "COMPLETED",
            "total_tables": total_tables,
            "consistent_tables": consistent_tables,
            "total_backfilled": total_backfilled,
            "error_tables": total_error_tables,
            "layers": layer_results,
        }