在前后端开发联调前的提交 2026-02-23

This commit is contained in:
Neo
2026-02-23 23:02:20 +08:00
parent 254ccb1e77
commit fafc95e64c
1142 changed files with 10366960 additions and 36957 deletions

View File

@@ -0,0 +1,276 @@
"""
从测试数据库导出完整 DDL按 schema 分文件写入 docs/database/ddl/。
以数据库现状为准,整合所有 schema/表/约束/索引/视图/物化视图/序列/FDW 配置。
输出文件:
docs/database/ddl/etl_feiqiu__meta.sql
docs/database/ddl/etl_feiqiu__ods.sql
docs/database/ddl/etl_feiqiu__dwd.sql
docs/database/ddl/etl_feiqiu__core.sql
docs/database/ddl/etl_feiqiu__dws.sql
docs/database/ddl/etl_feiqiu__app.sql
docs/database/ddl/zqyy_app__public.sql
docs/database/ddl/fdw.sql
用法cd C:\\NeoZQYY && python scripts/ops/gen_consolidated_ddl.py
"""
import os, sys
from pathlib import Path
from datetime import date
import psycopg2
# ── Environment ───────────────────────────────────────────────────────────
from dotenv import load_dotenv

# Project root: three levels up from this script (scripts/ops/<file>.py).
ROOT = Path(__file__).resolve().parent.parent.parent
load_dotenv(ROOT / ".env")

# Connection strings: the TEST_* variables take precedence over the
# generic fallbacks. Both databases are required; abort early otherwise.
ETL_DSN = os.environ.get("TEST_DB_DSN") or os.environ.get("PG_DSN")
APP_DSN = os.environ.get("TEST_APP_DB_DSN") or os.environ.get("APP_DB_DSN")
if not ETL_DSN:
    sys.exit("ERROR: TEST_DB_DSN / PG_DSN 未配置")
if not APP_DSN:
    sys.exit("ERROR: TEST_APP_DB_DSN / APP_DB_DSN 未配置")

# Output directory for the generated DDL files (created if absent).
OUTPUT_DIR = ROOT / "docs" / "database" / "ddl"
OUTPUT_DIR.mkdir(parents=True, exist_ok=True)
# Hand-maintained FDW setup script, copied verbatim into the output.
FDW_FILE = ROOT / "db" / "fdw" / "setup_fdw.sql"
# Generation date stamped into every file header.
TODAY = date.today().isoformat()
# ── SQL templates ─────────────────────────────────────────────────────────
# NOTE: %% doubles the percent sign so psycopg2 does not treat it as a
# parameter placeholder; the server-side format() then sees a literal %I/%s.

# Rebuild CREATE TABLE statements for all base tables of a schema from
# information_schema.columns (column type, default, NOT NULL).
SQL_TABLES = """
WITH cols AS (
SELECT table_schema, table_name,
string_agg(
format(E' %%I %%s%%s%%s',
column_name,
CASE WHEN data_type = 'USER-DEFINED' THEN udt_name
WHEN data_type = 'ARRAY' THEN udt_name
WHEN character_maximum_length IS NOT NULL THEN data_type || '(' || character_maximum_length || ')'
WHEN numeric_precision IS NOT NULL AND data_type IN ('numeric','decimal') THEN data_type || '(' || numeric_precision || ',' || numeric_scale || ')'
ELSE data_type END,
CASE WHEN column_default IS NOT NULL THEN ' DEFAULT ' || column_default ELSE '' END,
CASE WHEN is_nullable = 'NO' THEN ' NOT NULL' ELSE '' END
), E',\\n' ORDER BY ordinal_position
) as col_defs
FROM information_schema.columns
WHERE table_schema = %s
AND table_name IN (SELECT table_name FROM information_schema.tables WHERE table_schema = %s AND table_type = 'BASE TABLE')
GROUP BY table_schema, table_name
)
SELECT format(E'CREATE TABLE %%I.%%I (\\n%%s\\n);', table_schema, table_name, col_defs) as ddl
FROM cols ORDER BY table_name;
"""

# Primary-key / unique / foreign-key constraints, with the full definition
# reconstructed by pg_get_constraintdef.
SQL_CONSTRAINTS = """
SELECT n.nspname as schema, conrelid::regclass as tbl, conname,
pg_get_constraintdef(c.oid) as def, contype
FROM pg_constraint c
JOIN pg_namespace n ON n.oid = c.connamespace
WHERE n.nspname = %s AND contype IN ('p','u','f')
ORDER BY conrelid::regclass::text, contype, conname;
"""

# Plain indexes; excludes those backing PK/unique constraints (emitted above).
SQL_INDEXES = """
SELECT indexname, indexdef
FROM pg_indexes
WHERE schemaname = %s
AND indexname NOT IN (SELECT conname FROM pg_constraint WHERE contype IN ('p','u'))
ORDER BY tablename, indexname;
"""

# Sequences of a schema (name + underlying data type).
SQL_SEQUENCES = """
SELECT sequence_name, data_type
FROM information_schema.sequences
WHERE sequence_schema = %s
ORDER BY sequence_name;
"""

# Regular views with their SELECT definitions.
SQL_VIEWS = """
SELECT viewname, definition
FROM pg_views
WHERE schemaname = %s
ORDER BY viewname;
"""

# Materialized views with their SELECT definitions.
SQL_MATVIEWS = """
SELECT matviewname, definition
FROM pg_matviews
WHERE schemaname = %s
ORDER BY matviewname;
"""

# Indexes on materialized views, matched by the mv_ naming convention
# ('%%' is a literal '%' wildcard after psycopg2 unescaping).
SQL_MV_INDEXES = """
SELECT indexname, indexdef
FROM pg_indexes
WHERE schemaname = %s
AND tablename LIKE 'mv_%%'
ORDER BY tablename, indexname;
"""

# Number of base tables in a schema (for the console summary).
SQL_TABLE_COUNT = """
SELECT count(*) FROM information_schema.tables
WHERE table_schema = %s AND table_type = 'BASE TABLE';
"""
# ── Helper functions ──────────────────────────────────────────────────────
def query(conn, sql, params=None):
    """Execute *sql* on *conn* (optionally with bind *params*) and return all rows."""
    with conn.cursor() as cursor:
        cursor.execute(sql, params)
        rows = cursor.fetchall()
    return rows
def section(f, title, level=1):
    """Write a commented section separator with *title* to stream *f*.

    Level 1 uses '=' rules, any other level uses '-' rules.
    """
    rule_char = "=" if level == 1 else "-"
    rule = rule_char * 77
    f.write(f"\n-- {rule}\n-- {title}\n-- {rule}\n\n")
def write_sequences(f, conn, schema):
    """Emit CREATE SEQUENCE statements for every sequence in *schema*."""
    sequences = query(conn, SQL_SEQUENCES, (schema,))
    if not sequences:
        return
    f.write("-- 序列\n")
    f.writelines(
        f"CREATE SEQUENCE IF NOT EXISTS {schema}.{seq_name} AS {seq_type};\n"
        for seq_name, seq_type in sequences
    )
    f.write("\n")
def write_tables(f, conn, schema):
    """Emit reconstructed CREATE TABLE statements for all base tables in *schema*."""
    table_rows = query(conn, SQL_TABLES, (schema, schema))
    if not table_rows:
        return
    f.write("-- 表\n")
    f.writelines(ddl + "\n\n" for (ddl,) in table_rows)
def write_constraints(f, conn, schema):
    """Emit ALTER TABLE ... ADD CONSTRAINT statements (PK / unique / FK) for *schema*."""
    constraint_rows = query(conn, SQL_CONSTRAINTS, (schema,))
    if not constraint_rows:
        return
    f.write("-- 约束(主键 / 唯一 / 外键)\n")
    for row in constraint_rows:
        # Row layout: (schema, table, name, definition, type); schema and
        # type are unused here.
        table, name, definition = row[1], row[2], row[3]
        f.write(f"ALTER TABLE {table} ADD CONSTRAINT {name} {definition};\n")
    f.write("\n")
def write_indexes(f, conn, schema):
    """Emit CREATE INDEX statements for non-constraint indexes in *schema*."""
    index_rows = query(conn, SQL_INDEXES, (schema,))
    if not index_rows:
        return
    f.write("-- 索引\n")
    f.writelines(f"{definition};\n" for _name, definition in index_rows)
    f.write("\n")
def write_views(f, conn, schema):
    """Emit CREATE OR REPLACE VIEW statements for every view in *schema*."""
    view_rows = query(conn, SQL_VIEWS, (schema,))
    if not view_rows:
        return
    f.write("-- 视图\n")
    for view_name, view_def in view_rows:
        body = view_def.strip()
        f.write(f"CREATE OR REPLACE VIEW {schema}.{view_name} AS\n{body}\n;\n\n")
def write_matviews(f, conn, schema):
    """Emit materialized views in *schema*, followed by their indexes."""
    mv_rows = query(conn, SQL_MATVIEWS, (schema,))
    if not mv_rows:
        return
    f.write("-- 物化视图\n")
    for mv_name, mv_def in mv_rows:
        body = mv_def.strip()
        f.write(f"CREATE MATERIALIZED VIEW {schema}.{mv_name} AS\n{body}\n;\n\n")
    # Indexes on the materialized views (matched by the mv_ name prefix).
    index_rows = query(conn, SQL_MV_INDEXES, (schema,))
    if not index_rows:
        return
    f.write("-- 物化视图索引\n")
    f.writelines(f"{definition};\n" for _name, definition in index_rows)
    f.write("\n")
def write_schema_file(conn, db_name, schema, label, views_only=False):
    """Generate a standalone DDL file for a single schema.

    Args:
        conn: open psycopg2 connection to the database being exported.
        db_name: logical database name, used as the file-name prefix.
        schema: schema to export.
        label: human-readable description written into the file header.
        views_only: when True, export only views (used for the RLS view layer).

    Returns:
        Path of the file that was written.
    """
    filename = f"{db_name}__{schema}.sql"
    filepath = OUTPUT_DIR / filename
    # Base-table count, reported in the console summary below.
    table_count = query(conn, SQL_TABLE_COUNT, (schema,))[0][0]
    with open(filepath, "w", encoding="utf-8") as f:
        # File header; the trailing backslash keeps the literal flush-left.
        f.write(f"""\
-- =============================================================================
-- {db_name} / {schema}{label}
-- 生成日期:{TODAY}
-- 来源:测试库(通过脚本自动导出)
-- =============================================================================
CREATE SCHEMA IF NOT EXISTS {schema};
""")
        if views_only:
            write_views(f, conn, schema)
        else:
            # Emission order matters: sequences and tables before the
            # constraints/indexes that reference them, views last.
            write_sequences(f, conn, schema)
            write_tables(f, conn, schema)
            write_constraints(f, conn, schema)
            write_indexes(f, conn, schema)
            write_views(f, conn, schema)
            write_matviews(f, conn, schema)
    size_kb = filepath.stat().st_size / 1024
    obj_desc = "仅视图" if views_only else f"{table_count}"
    print(f"{filename:<35s} {size_kb:>6.1f} KB ({obj_desc})")
    return filepath
def write_fdw_file():
    """Copy the FDW setup script (db/fdw/setup_fdw.sql) into the output dir.

    Returns:
        Path of the generated fdw.sql file.
    """
    filepath = OUTPUT_DIR / "fdw.sql"
    with open(filepath, "w", encoding="utf-8") as f:
        # File header; the trailing backslash keeps the literal flush-left.
        f.write(f"""\
-- =============================================================================
-- FDW 跨库映射(在 zqyy_app 中执行)
-- 生成日期:{TODAY}
-- 来源db/fdw/setup_fdw.sql
-- =============================================================================
""")
        if FDW_FILE.exists():
            f.write(FDW_FILE.read_text(encoding="utf-8"))
            f.write("\n")
        else:
            # Best-effort: emit a placeholder comment rather than failing
            # when the source file is missing.
            f.write("-- FDW 配置文件未找到db/fdw/setup_fdw.sql\n")
    size_kb = filepath.stat().st_size / 1024
    print(f"{'fdw.sql':<35s} {size_kb:>6.1f} KB")
    return filepath
# ── Main flow ─────────────────────────────────────────────────────────────
def main():
    """Export DDL for every schema of both databases, then the FDW file.

    Fix over the original: both psycopg2 connections are now closed in
    try/finally blocks, so they are released even if any export step raises.
    """
    etl_conn = psycopg2.connect(ETL_DSN)
    try:
        app_conn = psycopg2.connect(APP_DSN)
        try:
            print(f"输出目录:{OUTPUT_DIR}\n")
            # etl_feiqiu: the six warehouse-layer schemas.
            write_schema_file(etl_conn, "etl_feiqiu", "meta", "ETL 调度元数据")
            write_schema_file(etl_conn, "etl_feiqiu", "ods", "原始数据层")
            write_schema_file(etl_conn, "etl_feiqiu", "dwd", "明细数据层")
            write_schema_file(etl_conn, "etl_feiqiu", "core", "跨门店标准化维度/事实")
            write_schema_file(etl_conn, "etl_feiqiu", "dws", "汇总数据层")
            write_schema_file(etl_conn, "etl_feiqiu", "app", "RLS 视图层", views_only=True)
            # zqyy_app: application database.
            write_schema_file(app_conn, "zqyy_app", "public", "小程序业务表")
            # FDW cross-database mapping.
            write_fdw_file()
        finally:
            app_conn.close()
    finally:
        etl_conn.close()
    # Remove the obsolete single-file export superseded by per-schema files.
    old_file = ROOT / "docs" / "database" / "consolidated_ddl.sql"
    if old_file.exists():
        old_file.unlink()
        print(f"\n🗑️ 已删除旧文件:{old_file.name}")
    print(f"\n✅ 完成,共 8 个文件")


if __name__ == "__main__":
    main()