阶段性更新
This commit is contained in:
@@ -1,31 +1,50 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
"""支付记录加载器"""
|
||||
"""支付事实表加载器"""
|
||||
from ..base_loader import BaseLoader
|
||||
|
||||
class PaymentLoader(BaseLoader):
|
||||
"""支付记录加载器"""
|
||||
"""支付数据加载器"""
|
||||
|
||||
def upsert_payments(self, records: list, store_id: int) -> tuple:
|
||||
"""加载支付记录"""
|
||||
"""加载支付数据"""
|
||||
if not records:
|
||||
return (0, 0, 0)
|
||||
|
||||
sql = """
|
||||
INSERT INTO billiards.fact_payment (
|
||||
store_id, pay_id, order_id, pay_time, pay_amount,
|
||||
pay_type, pay_status, remark, raw_data
|
||||
store_id, pay_id, site_id, tenant_id,
|
||||
order_settle_id, order_trade_no,
|
||||
relate_type, relate_id,
|
||||
create_time, pay_time,
|
||||
pay_amount, fee_amount, discount_amount,
|
||||
payment_method, online_pay_channel, pay_terminal,
|
||||
pay_status, raw_data
|
||||
)
|
||||
VALUES (
|
||||
%(store_id)s, %(pay_id)s, %(order_id)s, %(pay_time)s, %(pay_amount)s,
|
||||
%(pay_type)s, %(pay_status)s, %(remark)s, %(raw_data)s
|
||||
%(store_id)s, %(pay_id)s, %(site_id)s, %(tenant_id)s,
|
||||
%(order_settle_id)s, %(order_trade_no)s,
|
||||
%(relate_type)s, %(relate_id)s,
|
||||
%(create_time)s, %(pay_time)s,
|
||||
%(pay_amount)s, %(fee_amount)s, %(discount_amount)s,
|
||||
%(payment_method)s, %(online_pay_channel)s, %(pay_terminal)s,
|
||||
%(pay_status)s, %(raw_data)s
|
||||
)
|
||||
ON CONFLICT (store_id, pay_id) DO UPDATE SET
|
||||
order_id = EXCLUDED.order_id,
|
||||
order_settle_id = EXCLUDED.order_settle_id,
|
||||
order_trade_no = EXCLUDED.order_trade_no,
|
||||
relate_type = EXCLUDED.relate_type,
|
||||
relate_id = EXCLUDED.relate_id,
|
||||
site_id = EXCLUDED.site_id,
|
||||
tenant_id = EXCLUDED.tenant_id,
|
||||
create_time = EXCLUDED.create_time,
|
||||
pay_time = EXCLUDED.pay_time,
|
||||
pay_amount = EXCLUDED.pay_amount,
|
||||
pay_type = EXCLUDED.pay_type,
|
||||
fee_amount = EXCLUDED.fee_amount,
|
||||
discount_amount = EXCLUDED.discount_amount,
|
||||
payment_method = EXCLUDED.payment_method,
|
||||
online_pay_channel = EXCLUDED.online_pay_channel,
|
||||
pay_terminal = EXCLUDED.pay_terminal,
|
||||
pay_status = EXCLUDED.pay_status,
|
||||
remark = EXCLUDED.remark,
|
||||
raw_data = EXCLUDED.raw_data,
|
||||
updated_at = now()
|
||||
RETURNING (xmax = 0) AS inserted
|
||||
|
||||
New file (188 lines): etl_billiards/loaders/facts/ticket.py
@@ -0,0 +1,188 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
"""小票详情加载器"""
|
||||
from ..base_loader import BaseLoader
|
||||
import json
|
||||
|
||||
class TicketLoader(BaseLoader):
    """
    Loader that parses Ticket Detail JSON and populates the DWD fact tables.

    Handles:
        - fact_order (header)
        - fact_order_goods (items)
        - fact_table_usage (items)
        - fact_assistant_service (items)
    """

    def process_tickets(self, tickets: list, store_id: int) -> tuple:
        """
        Process a batch of ticket JSON payloads.

        Args:
            tickets: Raw ticket documents as fetched from the upstream API.
            store_id: Store identifier stamped on every derived fact row.

        Returns:
            Tuple of (inserted_count, error_count), where inserted_count is
            the number of tickets successfully parsed (not DB rows written).
        """
        inserted_count = 0
        error_count = 0

        # Accumulate rows per target table so each table gets one batch upsert
        # instead of a round-trip per ticket.
        orders = []
        goods_list = []
        table_usages = []
        assistant_services = []

        for ticket in tickets:
            try:
                # 1. Parse header (fact_order). The payload nests the body under
                #    data.data; `or {}` also guards explicit JSON nulls, which
                #    .get(..., {}) alone would not catch.
                root_data = (ticket.get("data") or {}).get("data") or {}
                if not root_data:
                    continue

                order_settle_id = root_data.get("orderSettleId")
                if not order_settle_id:
                    # Without a settle id we cannot key any fact row.
                    continue

                orders.append({
                    "store_id": store_id,
                    "order_settle_id": order_settle_id,
                    "order_trade_no": 0,
                    "order_no": str(root_data.get("orderSettleNumber", "")),
                    "member_id": 0,
                    "pay_time": root_data.get("payTime"),
                    "total_amount": root_data.get("consumeMoney", 0),
                    "pay_amount": root_data.get("actualPayment", 0),
                    "discount_amount": root_data.get("memberOfferAmount", 0),
                    "coupon_amount": root_data.get("couponAmount", 0),
                    "status": "PAID",
                    "cashier_name": root_data.get("cashierName", ""),
                    "remark": root_data.get("orderRemark", ""),
                    # Keep the full original document for lineage/debugging.
                    "raw_data": json.dumps(ticket, ensure_ascii=False)
                })

                # 2. Parse items (orderItem list); `or []` guards a JSON null.
                for item in root_data.get("orderItem") or []:
                    order_trade_no = item.get("siteOrderId")

                    # 2.1 Table ledger -> fact_table_usage
                    table_ledger = item.get("tableLedger")
                    if table_ledger:
                        # `or 0` on the amounts: an explicit null would make the
                        # subtraction below raise TypeError with plain .get defaults.
                        consumption = table_ledger.get("consumptionAmount") or 0
                        member_discount = table_ledger.get("memberDiscountAmount") or 0
                        table_usages.append({
                            "store_id": store_id,
                            "order_ledger_id": table_ledger.get("orderTableLedgerId"),
                            "order_settle_id": order_settle_id,
                            "table_id": table_ledger.get("siteTableId"),
                            "table_name": table_ledger.get("tableName"),
                            "start_time": table_ledger.get("chargeStartTime"),
                            "end_time": table_ledger.get("chargeEndTime"),
                            "duration_minutes": table_ledger.get("useDuration", 0),
                            "total_amount": consumption,
                            "pay_amount": consumption - member_discount
                        })

                    # 2.2 Goods ledgers -> fact_order_goods
                    for g in item.get("goodsLedgers") or []:
                        goods_list.append({
                            "store_id": store_id,
                            "order_goods_id": g.get("orderGoodsLedgerId"),
                            "order_settle_id": order_settle_id,
                            "order_trade_no": order_trade_no,
                            "goods_id": g.get("siteGoodsId"),
                            "goods_name": g.get("goodsName"),
                            "quantity": g.get("goodsCount", 0),
                            "unit_price": g.get("goodsPrice", 0),
                            "total_amount": g.get("ledgerAmount", 0),
                            "pay_amount": g.get("realGoodsMoney", 0)
                        })

                    # 2.3 Assistant services -> fact_assistant_service
                    for a in item.get("assistantPlayWith") or []:
                        assistant_services.append({
                            "store_id": store_id,
                            "ledger_id": a.get("orderAssistantLedgerId"),
                            "order_settle_id": order_settle_id,
                            "assistant_id": a.get("assistantId"),
                            "assistant_name": a.get("ledgerName"),
                            "service_type": a.get("skillName", "Play"),
                            "start_time": a.get("ledgerStartTime"),
                            "end_time": a.get("ledgerEndTime"),
                            # ledgerCount is presumably seconds -> minutes; TODO confirm upstream unit.
                            "duration_minutes": int(a.get("ledgerCount", 0) / 60) if a.get("ledgerCount") else 0,
                            "total_amount": a.get("ledgerAmount", 0),
                            "pay_amount": a.get("ledgerAmount", 0)
                        })

                inserted_count += 1

            except Exception as e:
                # One malformed ticket must not abort the whole batch.
                self.logger.error(f"Error parsing ticket: {e}", exc_info=True)
                error_count += 1

        # 3. Batch insert/upsert each populated table.
        if orders:
            self._upsert_orders(orders)
        if goods_list:
            self._upsert_goods(goods_list)
        if table_usages:
            self._upsert_table_usages(table_usages)
        if assistant_services:
            self._upsert_assistant_services(assistant_services)

        return inserted_count, error_count

    def _upsert_orders(self, rows):
        """Batch upsert header rows into fact_order (keyed by store_id + order_settle_id)."""
        sql = """
            INSERT INTO billiards.fact_order (
                store_id, order_settle_id, order_trade_no, order_no, member_id,
                pay_time, total_amount, pay_amount, discount_amount, coupon_amount,
                status, cashier_name, remark, raw_data
            ) VALUES (
                %(store_id)s, %(order_settle_id)s, %(order_trade_no)s, %(order_no)s, %(member_id)s,
                %(pay_time)s, %(total_amount)s, %(pay_amount)s, %(discount_amount)s, %(coupon_amount)s,
                %(status)s, %(cashier_name)s, %(remark)s, %(raw_data)s
            )
            ON CONFLICT (store_id, order_settle_id) DO UPDATE SET
                pay_time = EXCLUDED.pay_time,
                pay_amount = EXCLUDED.pay_amount,
                updated_at = now()
        """
        self.db.batch_execute(sql, rows)

    def _upsert_goods(self, rows):
        """Batch upsert item rows into fact_order_goods (keyed by store_id + order_goods_id)."""
        sql = """
            INSERT INTO billiards.fact_order_goods (
                store_id, order_goods_id, order_settle_id, order_trade_no,
                goods_id, goods_name, quantity, unit_price, total_amount, pay_amount
            ) VALUES (
                %(store_id)s, %(order_goods_id)s, %(order_settle_id)s, %(order_trade_no)s,
                %(goods_id)s, %(goods_name)s, %(quantity)s, %(unit_price)s, %(total_amount)s, %(pay_amount)s
            )
            ON CONFLICT (store_id, order_goods_id) DO UPDATE SET
                pay_amount = EXCLUDED.pay_amount
        """
        self.db.batch_execute(sql, rows)

    def _upsert_table_usages(self, rows):
        """Batch upsert table-time rows into fact_table_usage (keyed by store_id + order_ledger_id)."""
        sql = """
            INSERT INTO billiards.fact_table_usage (
                store_id, order_ledger_id, order_settle_id, table_id, table_name,
                start_time, end_time, duration_minutes, total_amount, pay_amount
            ) VALUES (
                %(store_id)s, %(order_ledger_id)s, %(order_settle_id)s, %(table_id)s, %(table_name)s,
                %(start_time)s, %(end_time)s, %(duration_minutes)s, %(total_amount)s, %(pay_amount)s
            )
            ON CONFLICT (store_id, order_ledger_id) DO UPDATE SET
                pay_amount = EXCLUDED.pay_amount
        """
        self.db.batch_execute(sql, rows)

    def _upsert_assistant_services(self, rows):
        """Batch upsert assistant rows into fact_assistant_service (keyed by store_id + ledger_id)."""
        sql = """
            INSERT INTO billiards.fact_assistant_service (
                store_id, ledger_id, order_settle_id, assistant_id, assistant_name,
                service_type, start_time, end_time, duration_minutes, total_amount, pay_amount
            ) VALUES (
                %(store_id)s, %(ledger_id)s, %(order_settle_id)s, %(assistant_id)s, %(assistant_name)s,
                %(service_type)s, %(start_time)s, %(end_time)s, %(duration_minutes)s, %(total_amount)s, %(pay_amount)s
            )
            ON CONFLICT (store_id, ledger_id) DO UPDATE SET
                pay_amount = EXCLUDED.pay_amount
        """
        self.db.batch_execute(sql, rows)
|
||||
New file (6 lines): etl_billiards/loaders/ods/__init__.py
@@ -0,0 +1,6 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
"""ODS loader helpers."""
|
||||
|
||||
from .generic import GenericODSLoader
|
||||
|
||||
__all__ = ["GenericODSLoader"]
|
||||
New file (67 lines): etl_billiards/loaders/ods/generic.py
@@ -0,0 +1,67 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
"""Generic ODS loader that keeps raw payload + primary keys."""
|
||||
from __future__ import annotations
|
||||
|
||||
import json
|
||||
from datetime import datetime, timezone
|
||||
from typing import Iterable, Sequence
|
||||
|
||||
from ..base_loader import BaseLoader
|
||||
|
||||
|
||||
class GenericODSLoader(BaseLoader):
    """Insert/update helper for ODS tables that share the same pattern.

    Each configured table is upserted with a generated
    ``INSERT ... ON CONFLICT ... DO UPDATE ... RETURNING`` statement keyed on
    ``conflict_columns``; all remaining columns are overwritten on conflict.
    """

    def __init__(
        self,
        db_ops,
        table_name: str,
        columns: Sequence[str],
        conflict_columns: Sequence[str],
    ):
        """
        Args:
            db_ops: Database operations helper (provides batch_upsert_with_returning).
            table_name: Fully qualified target table name. NOTE(review): this is
                interpolated into SQL — must come from trusted configuration only.
            columns: All columns written by the upsert (must include the keys).
            conflict_columns: Primary/unique key columns used in ON CONFLICT.

        Raises:
            ValueError: If ``conflict_columns`` is empty, or if no non-conflict
                column remains to build the DO UPDATE SET clause.
        """
        super().__init__(db_ops)
        if not conflict_columns:
            raise ValueError("conflict_columns must not be empty for ODS loader")
        self.table_name = table_name
        self.columns = list(columns)
        self.conflict_columns = list(conflict_columns)
        # Fail fast on bad config: _build_sql raises if the update clause
        # would be empty (every column is a conflict key).
        self._sql = self._build_sql()

    def upsert_rows(self, rows: Iterable[dict]) -> tuple[int, int, int]:
        """Insert/update the provided iterable of row dictionaries.

        Returns:
            (inserted, updated, skipped) — skipped is always 0 here.
        """
        rows = list(rows)
        if not rows:
            return (0, 0, 0)

        normalized = [self._normalize_row(row) for row in rows]
        inserted, updated = self.db.batch_upsert_with_returning(
            self._sql, normalized, page_size=self._batch_size()
        )
        return inserted, updated, 0

    def _build_sql(self) -> str:
        """Generate the parameterized upsert statement for this table."""
        col_list = ", ".join(self.columns)
        placeholders = ", ".join(f"%({col})s" for col in self.columns)
        conflict_clause = ", ".join(self.conflict_columns)
        update_columns = [c for c in self.columns if c not in self.conflict_columns]
        if not update_columns:
            # An empty SET clause would generate invalid SQL
            # ("... DO UPDATE SET RETURNING ..."); surface the misconfiguration
            # at construction time instead of at first execution.
            raise ValueError(
                f"table {self.table_name}: no non-conflict columns to update"
            )
        set_clause = ", ".join(f"{col} = EXCLUDED.{col}" for col in update_columns)
        return (
            f"INSERT INTO {self.table_name} ({col_list}) "
            f"VALUES ({placeholders}) "
            f"ON CONFLICT ({conflict_clause}) DO UPDATE SET {set_clause} "
            # xmax = 0 distinguishes a fresh insert from a conflict-update.
            f"RETURNING (xmax = 0) AS inserted"
        )

    def _normalize_row(self, row: dict) -> dict:
        """Project *row* onto the configured columns and coerce special fields.

        - ``payload`` values that are not already strings are JSON-encoded.
        - A missing/None ``fetched_at`` is defaulted to the current UTC time.
        - Columns absent from *row* become None (SQL NULL).
        """
        normalized = {}
        for col in self.columns:
            value = row.get(col)
            if col == "payload" and value is not None and not isinstance(value, str):
                normalized[col] = json.dumps(value, ensure_ascii=False)
            else:
                normalized[col] = value

        if "fetched_at" in normalized and normalized["fetched_at"] is None:
            normalized["fetched_at"] = datetime.now(timezone.utc)

        return normalized
|
||||
Reference in New Issue
Block a user