初始提交:飞球 ETL 系统全量代码
This commit is contained in:
2
tasks/ods/__init__.py
Normal file
2
tasks/ods/__init__.py
Normal file
@@ -0,0 +1,2 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
"""ODS 层抓取任务"""
|
||||
81
tasks/ods/assistant_abolish_task.py
Normal file
81
tasks/ods/assistant_abolish_task.py
Normal file
@@ -0,0 +1,81 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
"""助教作废任务"""
|
||||
|
||||
import json
|
||||
|
||||
from tasks.base_task import BaseTask, TaskContext
|
||||
from loaders.facts.assistant_abolish import AssistantAbolishLoader
|
||||
from models.parsers import TypeParser
|
||||
|
||||
|
||||
class AssistantAbolishTask(BaseTask):
    """Sync voided (abolished) assistant-service records into the ODS layer."""

    def get_task_code(self) -> str:
        """Return the stable task code used to identify this task."""
        return "ASSISTANT_ABOLISH"

    def extract(self, context: TaskContext) -> dict:
        """Fetch abolish records for the task window via the paginated API.

        Returns a dict with the raw ``records`` list.
        """
        params = self._merge_common_params(
            {
                "siteId": context.store_id,
                "startTime": TypeParser.format_timestamp(context.window_start, self.tz),
                "endTime": TypeParser.format_timestamp(context.window_end, self.tz),
            }
        )
        records, _ = self.api.get_paginated(
            endpoint="/AssistantPerformance/GetAbolitionAssistant",
            params=params,
            page_size=self.config.get("api.page_size", 200),
            data_path=("data",),
            list_key="abolitionAssistants",
        )
        return {"records": records}

    def transform(self, extracted: dict, context: TaskContext) -> dict:
        """Parse raw records; rows without a usable ID are counted as skipped."""
        parsed, skipped = [], 0
        for raw in extracted.get("records", []):
            mapped = self._parse_record(raw, context.store_id)
            if mapped:
                parsed.append(mapped)
            else:
                skipped += 1
        return {
            "records": parsed,
            "fetched": len(extracted.get("records", [])),
            "skipped": skipped,
        }

    def load(self, transformed: dict, context: TaskContext) -> dict:
        """Upsert parsed records through the loader and report counters."""
        loader = AssistantAbolishLoader(self.db)
        inserted, updated, loader_skipped = loader.upsert_records(transformed["records"])
        return {
            "fetched": transformed["fetched"],
            "inserted": inserted,
            "updated": updated,
            "skipped": transformed["skipped"] + loader_skipped,
            "errors": 0,
        }

    def _parse_record(self, raw: dict, store_id: int) -> dict | None:
        """Map one raw API row onto the ODS schema; None when the ID is missing."""
        abolish_id = TypeParser.parse_int(raw.get("id"))
        if not abolish_id:
            self.logger.warning("跳过缺少作废ID的记录: %s", raw)
            return None

        return {
            "store_id": store_id,
            "abolish_id": abolish_id,
            "table_id": TypeParser.parse_int(raw.get("tableId")),
            "table_name": raw.get("tableName"),
            "table_area_id": TypeParser.parse_int(raw.get("tableAreaId")),
            "table_area": raw.get("tableArea"),
            # Fix: sibling tasks read the camelCase key "assistantNo"; the
            # original read only "assistantOn" (apparent typo), which would
            # leave this column NULL. Keep the old key as a fallback in case
            # some payloads genuinely use it — TODO confirm against the API.
            "assistant_no": raw.get("assistantNo") or raw.get("assistantOn"),
            "assistant_name": raw.get("assistantName"),
            "charge_minutes": TypeParser.parse_int(raw.get("pdChargeMinutes")),
            "abolish_amount": TypeParser.parse_decimal(raw.get("assistantAbolishAmount")),
            "create_time": TypeParser.parse_timestamp(
                raw.get("createTime") or raw.get("create_time"), self.tz
            ),
            "trash_reason": raw.get("trashReason"),
            "raw_data": json.dumps(raw, ensure_ascii=False),
        }
|
||||
102
tasks/ods/assistants_task.py
Normal file
102
tasks/ods/assistants_task.py
Normal file
@@ -0,0 +1,102 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
"""助教账号任务"""
|
||||
|
||||
import json
|
||||
|
||||
from tasks.base_task import BaseTask, TaskContext
|
||||
from loaders.dimensions.assistant import AssistantLoader
|
||||
from models.parsers import TypeParser
|
||||
|
||||
|
||||
class AssistantsTask(BaseTask):
    """Sync assistant (staff) account profiles into the dimension table."""

    def get_task_code(self) -> str:
        """Return the stable task code used to identify this task."""
        return "ASSISTANTS"

    def extract(self, context: TaskContext) -> dict:
        """Pull the full assistant roster for the store (no time window)."""
        query = self._merge_common_params({"siteId": context.store_id})
        rows, _meta = self.api.get_paginated(
            endpoint="/PersonnelManagement/SearchAssistantInfo",
            params=query,
            page_size=self.config.get("api.page_size", 200),
            data_path=("data",),
            list_key="assistantInfos",
        )
        return {"records": rows}

    def transform(self, extracted: dict, context: TaskContext) -> dict:
        """Parse raw rows; rows without a usable ID are counted as skipped."""
        raw_rows = extracted.get("records", [])
        parsed = [
            row
            for raw in raw_rows
            if (row := self._parse_assistant(raw, context.store_id))
        ]
        return {
            "records": parsed,
            "fetched": len(raw_rows),
            "skipped": len(raw_rows) - len(parsed),
        }

    def load(self, transformed: dict, context: TaskContext) -> dict:
        """Write parsed assistants through the loader and report counters."""
        inserted, updated, dropped = AssistantLoader(self.db).upsert_assistants(
            transformed["records"]
        )
        return {
            "fetched": transformed["fetched"],
            "inserted": inserted,
            "updated": updated,
            "skipped": transformed["skipped"] + dropped,
            "errors": 0,
        }

    def _parse_assistant(self, raw: dict, store_id: int) -> dict | None:
        """Map one raw assistant row to the dimension schema; None if no ID."""
        assistant_id = TypeParser.parse_int(raw.get("id"))
        if not assistant_id:
            self.logger.warning("跳过缺少助教ID的数据: %s", raw)
            return None

        def pick(snake: str, camel: str):
            # Payloads mix snake_case and camelCase spellings; this keeps the
            # original `raw.get(a) or raw.get(b)` semantics exactly.
            return raw.get(snake) or raw.get(camel)

        def ts(snake: str, camel: str):
            # Parse whichever spelling is present as a timestamp in task tz.
            return TypeParser.parse_timestamp(pick(snake, camel), self.tz)

        return {
            "store_id": store_id,
            "assistant_id": assistant_id,
            "assistant_no": pick("assistant_no", "assistantNo"),
            "nickname": raw.get("nickname"),
            "real_name": pick("real_name", "realName"),
            "gender": raw.get("gender"),
            "mobile": raw.get("mobile"),
            "level": raw.get("level"),
            "team_id": TypeParser.parse_int(pick("team_id", "teamId")),
            "team_name": raw.get("team_name"),
            "assistant_status": raw.get("assistant_status"),
            "work_status": raw.get("work_status"),
            "entry_time": ts("entry_time", "entryTime"),
            "resign_time": ts("resign_time", "resignTime"),
            "start_time": ts("start_time", "startTime"),
            "end_time": ts("end_time", "endTime"),
            "create_time": ts("create_time", "createTime"),
            "update_time": ts("update_time", "updateTime"),
            "system_role_id": raw.get("system_role_id"),
            "online_status": raw.get("online_status"),
            "allow_cx": raw.get("allow_cx"),
            "charge_way": raw.get("charge_way"),
            "pd_unit_price": TypeParser.parse_decimal(raw.get("pd_unit_price")),
            "cx_unit_price": TypeParser.parse_decimal(raw.get("cx_unit_price")),
            "is_guaranteed": raw.get("is_guaranteed"),
            "is_team_leader": raw.get("is_team_leader"),
            "serial_number": raw.get("serial_number"),
            "show_sort": raw.get("show_sort"),
            "is_delete": raw.get("is_delete"),
            "raw_data": json.dumps(raw, ensure_ascii=False),
        }
|
||||
93
tasks/ods/coupon_usage_task.py
Normal file
93
tasks/ods/coupon_usage_task.py
Normal file
@@ -0,0 +1,93 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
"""平台券核销任务"""
|
||||
|
||||
import json
|
||||
|
||||
from tasks.base_task import BaseTask, TaskContext
|
||||
from loaders.facts.coupon_usage import CouponUsageLoader
|
||||
from models.parsers import TypeParser
|
||||
|
||||
|
||||
class CouponUsageTask(BaseTask):
    """Sync platform coupon verification/redemption records."""

    def get_task_code(self) -> str:
        """Return the stable task code used to identify this task."""
        return "COUPON_USAGE"

    def extract(self, context: TaskContext) -> dict:
        """Fetch offline-coupon consumption rows for the task window."""
        window = {
            "siteId": context.store_id,
            "startTime": TypeParser.format_timestamp(context.window_start, self.tz),
            "endTime": TypeParser.format_timestamp(context.window_end, self.tz),
        }
        rows, _meta = self.api.get_paginated(
            endpoint="/Promotion/GetOfflineCouponConsumePageList",
            params=self._merge_common_params(window),
            page_size=self.config.get("api.page_size", 200),
            data_path=("data",),
        )
        return {"records": rows}

    def transform(self, extracted: dict, context: TaskContext) -> dict:
        """Parse raw rows; rows without a usable ID are counted as skipped."""
        raw_rows = extracted.get("records", [])
        parsed = [
            row for raw in raw_rows if (row := self._parse_usage(raw, context.store_id))
        ]
        return {
            "records": parsed,
            "fetched": len(raw_rows),
            "skipped": len(raw_rows) - len(parsed),
        }

    def load(self, transformed: dict, context: TaskContext) -> dict:
        """Upsert parsed coupon usages through the loader and report counters."""
        inserted, updated, dropped = CouponUsageLoader(self.db).upsert_coupon_usage(
            transformed["records"]
        )
        return {
            "fetched": transformed["fetched"],
            "inserted": inserted,
            "updated": updated,
            "skipped": transformed["skipped"] + dropped,
            "errors": 0,
        }

    def _parse_usage(self, raw: dict, store_id: int) -> dict | None:
        """Map one raw coupon row to the ODS schema; None when the ID is absent."""
        usage_id = TypeParser.parse_int(raw.get("id"))
        if not usage_id:
            self.logger.warning("跳过缺少券核销ID的记录: %s", raw)
            return None

        # Local aliases keep the wide mapping below readable.
        parse_int = TypeParser.parse_int
        parse_dec = TypeParser.parse_decimal
        return {
            "store_id": store_id,
            "usage_id": usage_id,
            "coupon_code": raw.get("coupon_code"),
            "coupon_channel": raw.get("coupon_channel"),
            "coupon_name": raw.get("coupon_name"),
            "sale_price": parse_dec(raw.get("sale_price")),
            "coupon_money": parse_dec(raw.get("coupon_money")),
            "coupon_free_time": parse_int(raw.get("coupon_free_time")),
            "use_status": raw.get("use_status"),
            "create_time": TypeParser.parse_timestamp(
                raw.get("create_time") or raw.get("createTime"), self.tz
            ),
            "consume_time": TypeParser.parse_timestamp(
                raw.get("consume_time") or raw.get("consumeTime"), self.tz
            ),
            "operator_id": parse_int(raw.get("operator_id")),
            "operator_name": raw.get("operator_name"),
            "table_id": parse_int(raw.get("table_id")),
            "site_order_id": parse_int(raw.get("site_order_id")),
            "group_package_id": parse_int(raw.get("group_package_id")),
            "coupon_remark": raw.get("coupon_remark"),
            "deal_id": raw.get("deal_id"),
            "certificate_id": raw.get("certificate_id"),
            "verify_id": raw.get("verify_id"),
            "is_delete": raw.get("is_delete"),
            "raw_data": json.dumps(raw, ensure_ascii=False),
        }
|
||||
90
tasks/ods/inventory_change_task.py
Normal file
90
tasks/ods/inventory_change_task.py
Normal file
@@ -0,0 +1,90 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
"""库存变更任务"""
|
||||
|
||||
import json
|
||||
|
||||
from tasks.base_task import BaseTask, TaskContext
|
||||
from loaders.facts.inventory_change import InventoryChangeLoader
|
||||
from models.parsers import TypeParser
|
||||
|
||||
|
||||
class InventoryChangeTask(BaseTask):
    """Sync stock change (goods outbound receipt) records."""

    def get_task_code(self) -> str:
        """Return the stable task code used to identify this task."""
        return "INVENTORY_CHANGE"

    def extract(self, context: TaskContext) -> dict:
        """Fetch outbound stock receipts for the task window."""
        window = {
            "siteId": context.store_id,
            "startTime": TypeParser.format_timestamp(context.window_start, self.tz),
            "endTime": TypeParser.format_timestamp(context.window_end, self.tz),
        }
        rows, _meta = self.api.get_paginated(
            endpoint="/GoodsStockManage/QueryGoodsOutboundReceipt",
            params=self._merge_common_params(window),
            page_size=self.config.get("api.page_size", 200),
            data_path=("data",),
            list_key="queryDeliveryRecordsList",
        )
        return {"records": rows}

    def transform(self, extracted: dict, context: TaskContext) -> dict:
        """Parse raw rows; rows without a usable ID are counted as skipped."""
        raw_rows = extracted.get("records", [])
        parsed = [
            row for raw in raw_rows if (row := self._parse_change(raw, context.store_id))
        ]
        return {
            "records": parsed,
            "fetched": len(raw_rows),
            "skipped": len(raw_rows) - len(parsed),
        }

    def load(self, transformed: dict, context: TaskContext) -> dict:
        """Upsert parsed stock changes through the loader and report counters."""
        inserted, updated, dropped = InventoryChangeLoader(self.db).upsert_changes(
            transformed["records"]
        )
        return {
            "fetched": transformed["fetched"],
            "inserted": inserted,
            "updated": updated,
            "skipped": transformed["skipped"] + dropped,
            "errors": 0,
        }

    def _parse_change(self, raw: dict, store_id: int) -> dict | None:
        """Map one raw stock-change row to the ODS schema; None if no ID."""
        parse_int = TypeParser.parse_int
        change_id = parse_int(
            raw.get("siteGoodsStockId") or raw.get("site_goods_stock_id")
        )
        if not change_id:
            self.logger.warning("跳过缺少库存变动ID的记录: %s", raw)
            return None

        return {
            "store_id": store_id,
            "change_id": change_id,
            "site_goods_id": parse_int(
                raw.get("siteGoodsId") or raw.get("site_goods_id")
            ),
            "stock_type": raw.get("stockType") or raw.get("stock_type"),
            "goods_name": raw.get("goodsName"),
            "change_time": TypeParser.parse_timestamp(
                raw.get("createTime") or raw.get("create_time"), self.tz
            ),
            "start_qty": parse_int(raw.get("startNum")),
            "end_qty": parse_int(raw.get("endNum")),
            "change_qty": parse_int(raw.get("changeNum")),
            "unit": raw.get("unit"),
            "price": TypeParser.parse_decimal(raw.get("price")),
            "operator_name": raw.get("operatorName"),
            "remark": raw.get("remark"),
            "goods_category_id": parse_int(raw.get("goodsCategoryId")),
            "goods_second_category_id": parse_int(raw.get("goodsSecondCategoryId")),
            "raw_data": json.dumps(raw, ensure_ascii=False),
        }
|
||||
115
tasks/ods/ledger_task.py
Normal file
115
tasks/ods/ledger_task.py
Normal file
@@ -0,0 +1,115 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
"""助教流水任务"""
|
||||
|
||||
import json
|
||||
|
||||
from tasks.base_task import BaseTask, TaskContext
|
||||
from loaders.facts.assistant_ledger import AssistantLedgerLoader
|
||||
from models.parsers import TypeParser
|
||||
|
||||
|
||||
class LedgerTask(BaseTask):
    """Sync assistant service ledger entries (per-order assistant details)."""

    def get_task_code(self) -> str:
        """Return the stable task code used to identify this task."""
        return "LEDGER"

    def extract(self, context: TaskContext) -> dict:
        """Fetch assistant ledger rows for the task window via the paginated API."""
        params = self._merge_common_params(
            {
                "siteId": context.store_id,
                "startTime": TypeParser.format_timestamp(context.window_start, self.tz),
                "endTime": TypeParser.format_timestamp(context.window_end, self.tz),
            }
        )
        records, _ = self.api.get_paginated(
            endpoint="/AssistantPerformance/GetOrderAssistantDetails",
            params=params,
            page_size=self.config.get("api.page_size", 200),
            data_path=("data",),
            list_key="orderAssistantDetails",
        )
        return {"records": records}

    def transform(self, extracted: dict, context: TaskContext) -> dict:
        """Parse raw rows; rows without a usable ledger ID count as skipped."""
        parsed, skipped = [], 0
        for raw in extracted.get("records", []):
            mapped = self._parse_ledger(raw, context.store_id)
            if mapped:
                parsed.append(mapped)
            else:
                skipped += 1
        return {
            "records": parsed,
            "fetched": len(extracted.get("records", [])),
            "skipped": skipped,
        }

    def load(self, transformed: dict, context: TaskContext) -> dict:
        """Upsert parsed ledgers through the loader and report counters."""
        loader = AssistantLedgerLoader(self.db)
        inserted, updated, loader_skipped = loader.upsert_ledgers(transformed["records"])
        return {
            "fetched": transformed["fetched"],
            "inserted": inserted,
            "updated": updated,
            "skipped": transformed["skipped"] + loader_skipped,
            "errors": 0,
        }

    def _parse_ledger(self, raw: dict, store_id: int) -> dict | None:
        """Map one raw ledger row onto the ODS schema.

        Returns None (and logs a warning) when the row has no parseable ID.
        """
        ledger_id = TypeParser.parse_int(raw.get("id"))
        if not ledger_id:
            self.logger.warning("跳过缺少助教流水ID的记录: %s", raw)
            return None

        return {
            "store_id": store_id,
            "ledger_id": ledger_id,
            # Identity fields arrive in camelCase from this endpoint.
            "assistant_no": raw.get("assistantNo"),
            "assistant_name": raw.get("assistantName"),
            "nickname": raw.get("nickname"),
            "level_name": raw.get("levelName"),
            "table_name": raw.get("tableName"),
            # Monetary/count fields arrive in snake_case.
            "ledger_unit_price": TypeParser.parse_decimal(raw.get("ledger_unit_price")),
            "ledger_count": TypeParser.parse_int(raw.get("ledger_count")),
            "ledger_amount": TypeParser.parse_decimal(raw.get("ledger_amount")),
            "projected_income": TypeParser.parse_decimal(raw.get("projected_income")),
            "service_money": TypeParser.parse_decimal(raw.get("service_money")),
            "member_discount_amount": TypeParser.parse_decimal(
                raw.get("member_discount_amount")
            ),
            "manual_discount_amount": TypeParser.parse_decimal(
                raw.get("manual_discount_amount")
            ),
            "coupon_deduct_money": TypeParser.parse_decimal(
                raw.get("coupon_deduct_money")
            ),
            "order_trade_no": TypeParser.parse_int(raw.get("order_trade_no")),
            "order_settle_id": TypeParser.parse_int(raw.get("order_settle_id")),
            "operator_id": TypeParser.parse_int(raw.get("operator_id")),
            "operator_name": raw.get("operator_name"),
            "assistant_team_id": TypeParser.parse_int(raw.get("assistant_team_id")),
            "assistant_level": raw.get("assistant_level"),
            "site_table_id": TypeParser.parse_int(raw.get("site_table_id")),
            "order_assistant_id": TypeParser.parse_int(raw.get("order_assistant_id")),
            "site_assistant_id": TypeParser.parse_int(raw.get("site_assistant_id")),
            "user_id": TypeParser.parse_int(raw.get("user_id")),
            # All timestamps are parsed in the task's configured timezone.
            "ledger_start_time": TypeParser.parse_timestamp(
                raw.get("ledger_start_time"), self.tz
            ),
            "ledger_end_time": TypeParser.parse_timestamp(
                raw.get("ledger_end_time"), self.tz
            ),
            "start_use_time": TypeParser.parse_timestamp(raw.get("start_use_time"), self.tz),
            "last_use_time": TypeParser.parse_timestamp(raw.get("last_use_time"), self.tz),
            "income_seconds": TypeParser.parse_int(raw.get("income_seconds")),
            "real_use_seconds": TypeParser.parse_int(raw.get("real_use_seconds")),
            "is_trash": raw.get("is_trash"),
            "trash_reason": raw.get("trash_reason"),
            "is_confirm": raw.get("is_confirm"),
            "ledger_status": raw.get("ledger_status"),
            "create_time": TypeParser.parse_timestamp(
                raw.get("create_time") or raw.get("createTime"), self.tz
            ),
            # Full source row kept for replay/debugging.
            "raw_data": json.dumps(raw, ensure_ascii=False),
        }
|
||||
72
tasks/ods/members_task.py
Normal file
72
tasks/ods/members_task.py
Normal file
@@ -0,0 +1,72 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
"""会员ETL任务"""
|
||||
import json
|
||||
|
||||
from tasks.base_task import BaseTask, TaskContext
|
||||
from loaders.dimensions.member import MemberLoader
|
||||
from models.parsers import TypeParser
|
||||
|
||||
|
||||
class MembersTask(BaseTask):
    """ETL task for tenant member profiles."""

    def get_task_code(self) -> str:
        """Return the stable task code used to identify this task."""
        return "MEMBERS"

    def extract(self, context: TaskContext) -> dict:
        """Fetch the tenant member list for the store (no time window)."""
        query = self._merge_common_params({"siteId": context.store_id})
        rows, _meta = self.api.get_paginated(
            endpoint="/MemberProfile/GetTenantMemberList",
            params=query,
            page_size=self.config.get("api.page_size", 200),
            data_path=("data",),
            list_key="tenantMemberInfos",
        )
        return {"records": rows}

    def transform(self, extracted: dict, context: TaskContext) -> dict:
        """Parse member rows; unparseable rows are counted as skipped."""
        raw_rows = extracted.get("records", [])
        parsed = [
            row for raw in raw_rows if (row := self._parse_member(raw, context.store_id))
        ]
        return {
            "records": parsed,
            "fetched": len(raw_rows),
            "skipped": len(raw_rows) - len(parsed),
        }

    def load(self, transformed: dict, context: TaskContext) -> dict:
        """Upsert members for the store and report counters."""
        inserted, updated, dropped = MemberLoader(self.db).upsert_members(
            transformed["records"], context.store_id
        )
        return {
            "fetched": transformed["fetched"],
            "inserted": inserted,
            "updated": updated,
            "skipped": transformed["skipped"] + dropped,
            "errors": 0,
        }

    def _parse_member(self, raw: dict, store_id: int) -> dict | None:
        """Map one raw member row onto the dimension schema.

        Returns None when the row has no member ID or parsing raises.
        """
        try:
            member_id = TypeParser.parse_int(raw.get("memberId"))
            if not member_id:
                return None
            return {
                "store_id": store_id,
                "member_id": member_id,
                "member_name": raw.get("memberName"),
                "phone": raw.get("phone"),
                "balance": TypeParser.parse_decimal(raw.get("balance")),
                "status": raw.get("status"),
                "register_time": TypeParser.parse_timestamp(raw.get("registerTime"), self.tz),
                "raw_data": json.dumps(raw, ensure_ascii=False),
            }
        except Exception as exc:
            # Best-effort parsing: a bad row is logged and dropped, not fatal.
            self.logger.warning("解析会员记录失败: %s, 原始数据: %s", exc, raw)
            return None
|
||||
260
tasks/ods/ods_json_archive_task.py
Normal file
260
tasks/ods/ods_json_archive_task.py
Normal file
@@ -0,0 +1,260 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
"""在线抓取 ODS 相关接口并落盘为 JSON(用于后续离线回放/入库)。"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from dataclasses import dataclass
|
||||
from pathlib import Path
|
||||
|
||||
from api.client import APIClient
|
||||
from models.parsers import TypeParser
|
||||
from utils.json_store import dump_json, endpoint_to_filename
|
||||
|
||||
from tasks.base_task import BaseTask, TaskContext
|
||||
|
||||
|
||||
@dataclass(frozen=True)
class EndpointSpec:
    """Declarative description of one ODS endpoint to archive."""

    # API path, e.g. "/MemberProfile/GetTenantMemberList".
    endpoint: str
    window_style: str  # site | start_end | range | pay | none
    # Key path into the response envelope where the payload lives.
    data_path: tuple[str, ...] = ("data",)
    # Record-list key inside the payload; passed through to the paginated
    # client unchanged (None presumably means the payload itself is the list —
    # confirm against the client's implementation).
    list_key: str | None = None
|
||||
|
||||
|
||||
class OdsJsonArchiveTask(BaseTask):
    """
    Fetch the set of endpoints the ODS layer needs and write each page to disk
    as a "simplified JSON" file:
    {"code": 0, "data": [...records...]}

    Notes:
    - The output format is compatible with the parsing logic of
      tasks/manual_ingest_task.py;
    - One file per page by default, to keep individual files small;
    - Settle tickets (/Order/GetOrderSettleTicketNew) are written one file per
      orderSettleId.
    """

    # Endpoints to archive; window_style selects which time/site parameters
    # _build_params generates for each one.
    ENDPOINTS: tuple[EndpointSpec, ...] = (
        EndpointSpec("/MemberProfile/GetTenantMemberList", "site", list_key="tenantMemberInfos"),
        EndpointSpec("/MemberProfile/GetTenantMemberCardList", "site", list_key="tenantMemberCards"),
        EndpointSpec("/MemberProfile/GetMemberCardBalanceChange", "start_end"),
        EndpointSpec("/PersonnelManagement/SearchAssistantInfo", "site", list_key="assistantInfos"),
        EndpointSpec(
            "/AssistantPerformance/GetOrderAssistantDetails",
            "start_end",
            list_key="orderAssistantDetails",
        ),
        EndpointSpec(
            "/AssistantPerformance/GetAbolitionAssistant",
            "start_end",
            list_key="abolitionAssistants",
        ),
        EndpointSpec("/Table/GetSiteTables", "site", list_key="siteTables"),
        EndpointSpec(
            "/TenantGoodsCategory/QueryPrimarySecondaryCategory",
            "site",
            list_key="goodsCategoryList",
        ),
        EndpointSpec("/TenantGoods/QueryTenantGoods", "site", list_key="tenantGoodsList"),
        EndpointSpec("/TenantGoods/GetGoodsInventoryList", "site", list_key="orderGoodsList"),
        EndpointSpec("/TenantGoods/GetGoodsStockReport", "site"),
        EndpointSpec("/TenantGoods/GetGoodsSalesList", "start_end", list_key="orderGoodsLedgers"),
        EndpointSpec(
            "/PackageCoupon/QueryPackageCouponList",
            "site",
            list_key="packageCouponList",
        ),
        EndpointSpec("/Site/GetSiteTableUseDetails", "start_end", list_key="siteTableUseDetailsList"),
        EndpointSpec("/Site/GetSiteTableOrderDetails", "start_end", list_key="siteTableUseDetailsList"),
        EndpointSpec("/Site/GetTaiFeeAdjustList", "start_end", list_key="taiFeeAdjustInfos"),
        EndpointSpec(
            "/GoodsStockManage/QueryGoodsOutboundReceipt",
            "start_end",
            list_key="queryDeliveryRecordsList",
        ),
        EndpointSpec("/Promotion/GetOfflineCouponConsumePageList", "start_end"),
        EndpointSpec("/Order/GetRefundPayLogList", "start_end"),
        EndpointSpec("/Site/GetAllOrderSettleList", "range", list_key="settleList"),
        EndpointSpec("/Site/GetRechargeSettleList", "range", list_key="settleList"),
        EndpointSpec("/PayLog/GetPayLogListPage", "pay"),
    )

    # Ticket-detail endpoint, fetched separately per orderSettleId.
    TICKET_ENDPOINT = "/Order/GetOrderSettleTicketNew"

    def get_task_code(self) -> str:
        """Return the stable task code used to identify this task."""
        return "ODS_JSON_ARCHIVE"

    def extract(self, context: TaskContext) -> dict:
        """Fetch all configured endpoints and archive each page as JSON.

        Writes one file per page plus a final manifest.json; returns summary
        counters. Per-endpoint failures are logged and recorded, not raised.
        """
        # Unwrap a possible wrapper client; this task needs the real online
        # APIClient (it uses iter_paginated directly).
        base_client = getattr(self.api, "base", None) or self.api
        if not isinstance(base_client, APIClient):
            raise TypeError("ODS_JSON_ARCHIVE 需要 APIClient(在线抓取)")

        # Output directory: the wrapper's output_dir if set, else config.
        output_dir = getattr(self.api, "output_dir", None)
        if output_dir:
            out = Path(output_dir)
        else:
            out = Path(self.config.get("pipeline.fetch_root") or self.config["pipeline"]["fetch_root"])
        out.mkdir(parents=True, exist_ok=True)

        write_pretty = bool(self.config.get("io.write_pretty_json", False))
        page_size = int(self.config.get("api.page_size", 200) or 200)
        store_id = int(context.store_id)

        total_records = 0
        # orderSettleIds discovered in the pay-log pages; used for tickets.
        ticket_ids: set[int] = set()
        # Per-endpoint summary rows for the manifest.
        per_endpoint: list[dict] = []

        self.logger.info(
            "ODS_JSON_ARCHIVE: 开始抓取,窗口[%s ~ %s] 输出目录=%s",
            context.window_start,
            context.window_end,
            out,
        )

        for spec in self.ENDPOINTS:
            self.logger.info("ODS_JSON_ARCHIVE: 抓取 endpoint=%s", spec.endpoint)
            built_params = self._build_params(
                spec.window_style, store_id, context.window_start, context.window_end
            )
            # /TenantGoods/GetGoodsInventoryList requires siteId as an array
            # (a scalar triggers a server-side error that returns a malformed
            # status line "HTTP/1.1 1400").
            if spec.endpoint == "/TenantGoods/GetGoodsInventoryList":
                built_params["siteId"] = [store_id]
            params = self._merge_common_params(built_params)

            base_filename = endpoint_to_filename(spec.endpoint)
            stem = Path(base_filename).stem
            suffix = Path(base_filename).suffix or ".json"

            endpoint_records = 0
            endpoint_pages = 0
            endpoint_error: str | None = None

            try:
                for page_no, records, _, _ in base_client.iter_paginated(
                    endpoint=spec.endpoint,
                    params=params,
                    page_size=page_size,
                    data_path=spec.data_path,
                    list_key=spec.list_key,
                ):
                    endpoint_pages += 1
                    total_records += len(records)
                    endpoint_records += len(records)

                    # Collect settle IDs from pay-log rows for ticket fetching.
                    if spec.endpoint == "/PayLog/GetPayLogListPage":
                        for rec in records or []:
                            relate_id = TypeParser.parse_int(
                                (rec or {}).get("relateId")
                                or (rec or {}).get("orderSettleId")
                                or (rec or {}).get("order_settle_id")
                            )
                            if relate_id:
                                ticket_ids.add(relate_id)

                    # One file per page: <stem>__p0001.json etc.
                    out_path = out / f"{stem}__p{int(page_no):04d}{suffix}"
                    dump_json(out_path, {"code": 0, "data": records}, pretty=write_pretty)
            except Exception as exc:  # noqa: BLE001
                # A failing endpoint is recorded and skipped — the archive run
                # continues with the remaining endpoints.
                endpoint_error = f"{type(exc).__name__}: {exc}"
                self.logger.error("ODS_JSON_ARCHIVE: 接口抓取失败 endpoint=%s err=%s", spec.endpoint, endpoint_error)

            per_endpoint.append(
                {
                    "endpoint": spec.endpoint,
                    "file_stem": stem,
                    "pages": endpoint_pages,
                    "records": endpoint_records,
                    "error": endpoint_error,
                }
            )
            if endpoint_error:
                self.logger.warning(
                    "ODS_JSON_ARCHIVE: endpoint=%s 完成(失败)pages=%s records=%s err=%s",
                    spec.endpoint,
                    endpoint_pages,
                    endpoint_records,
                    endpoint_error,
                )
            else:
                self.logger.info(
                    "ODS_JSON_ARCHIVE: endpoint=%s 完成 pages=%s records=%s",
                    spec.endpoint,
                    endpoint_pages,
                    endpoint_records,
                )

        # Ticket details: fetched per orderSettleId collected above.
        ticket_ids_sorted = sorted(ticket_ids)
        self.logger.info("ODS_JSON_ARCHIVE: 小票候选数=%s", len(ticket_ids_sorted))

        ticket_file_stem = Path(endpoint_to_filename(self.TICKET_ENDPOINT)).stem
        ticket_file_suffix = Path(endpoint_to_filename(self.TICKET_ENDPOINT)).suffix or ".json"
        ticket_records = 0

        for order_settle_id in ticket_ids_sorted:
            params = self._merge_common_params({"orderSettleId": int(order_settle_id)})
            try:
                records, _ = base_client.get_paginated(
                    endpoint=self.TICKET_ENDPOINT,
                    params=params,
                    page_size=None,
                    data_path=("data",),
                    list_key=None,
                )
                if not records:
                    continue
                ticket_records += len(records)
                # One file per settle ID: <stem>__<id>.json
                out_path = out / f"{ticket_file_stem}__{int(order_settle_id)}{ticket_file_suffix}"
                dump_json(out_path, {"code": 0, "data": records}, pretty=write_pretty)
            except Exception as exc:  # noqa: BLE001
                # Individual ticket failures are logged and skipped.
                self.logger.error(
                    "ODS_JSON_ARCHIVE: 小票抓取失败 orderSettleId=%s err=%s",
                    order_settle_id,
                    exc,
                )
                continue

        total_records += ticket_records

        # Manifest summarizing the whole run, written pretty for humans.
        manifest = {
            "task": self.get_task_code(),
            "store_id": store_id,
            "window_start": context.window_start.isoformat(),
            "window_end": context.window_end.isoformat(),
            "page_size": page_size,
            "total_records": total_records,
            "ticket_ids": len(ticket_ids_sorted),
            "ticket_records": ticket_records,
            "endpoints": per_endpoint,
        }
        manifest_path = out / "manifest.json"
        dump_json(manifest_path, manifest, pretty=True)
        # Best-effort: expose the manifest on the wrapper client if supported.
        if hasattr(self.api, "last_dump"):
            try:
                self.api.last_dump = {"file": str(manifest_path), "records": total_records, "pages": None}
            except Exception:
                pass

        self.logger.info("ODS_JSON_ARCHIVE: 抓取完成,总记录数=%s(含小票=%s)", total_records, ticket_records)
        return {"fetched": total_records, "ticket_ids": len(ticket_ids_sorted)}

    def _build_params(self, window_style: str, store_id: int, window_start, window_end) -> dict:
        """Build the window/site query parameters for one EndpointSpec style."""
        if window_style == "none":
            return {}
        if window_style == "site":
            return {"siteId": store_id}
        if window_style == "range":
            return {
                "siteId": store_id,
                "rangeStartTime": TypeParser.format_timestamp(window_start, self.tz),
                "rangeEndTime": TypeParser.format_timestamp(window_end, self.tz),
            }
        if window_style == "pay":
            return {
                "siteId": store_id,
                "StartPayTime": TypeParser.format_timestamp(window_start, self.tz),
                "EndPayTime": TypeParser.format_timestamp(window_end, self.tz),
            }
        # Default ("start_end"): use startTime/endTime.
        return {
            "siteId": store_id,
            "startTime": TypeParser.format_timestamp(window_start, self.tz),
            "endTime": TypeParser.format_timestamp(window_end, self.tz),
        }
|
||||
1769
tasks/ods/ods_tasks.py
Normal file
1769
tasks/ods/ods_tasks.py
Normal file
File diff suppressed because it is too large
Load Diff
91
tasks/ods/orders_task.py
Normal file
91
tasks/ods/orders_task.py
Normal file
@@ -0,0 +1,91 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
"""订单ETL任务"""
|
||||
import json
|
||||
|
||||
from tasks.base_task import BaseTask, TaskContext
|
||||
from loaders.facts.order import OrderLoader
|
||||
from models.parsers import TypeParser
|
||||
|
||||
|
||||
class OrdersTask(BaseTask):
    """ETL task that syncs settled orders into fact_order."""

    def get_task_code(self) -> str:
        return "ORDERS"

    # ------------------------------------------------------------------ E/T/L hooks
    def extract(self, context: TaskContext) -> dict:
        """Fetch raw order-settle records for the task window from the API."""
        window_params = {
            "siteId": context.store_id,
            "rangeStartTime": TypeParser.format_timestamp(context.window_start, self.tz),
            "rangeEndTime": TypeParser.format_timestamp(context.window_end, self.tz),
        }
        records, pages_meta = self.api.get_paginated(
            endpoint="/Site/GetAllOrderSettleList",
            params=self._merge_common_params(window_params),
            page_size=self.config.get("api.page_size", 200),
            data_path=("data",),
            list_key="settleList",
        )
        return {"records": records, "meta": pages_meta}

    def transform(self, extracted: dict, context: TaskContext) -> dict:
        """Parse raw order JSON; rows that fail to parse are counted as skipped."""
        raw_records = extracted.get("records", [])
        parsed_records = []
        skipped = 0
        for rec in raw_records:
            row = self._parse_order(rec, context.store_id)
            if row:
                parsed_records.append(row)
            else:
                skipped += 1
        return {
            "records": parsed_records,
            "fetched": len(raw_records),
            "skipped": skipped,
        }

    def load(self, transformed: dict, context: TaskContext) -> dict:
        """Upsert parsed rows into fact_order and report standard counters."""
        inserted, updated, loader_skipped = OrderLoader(self.db).upsert_orders(
            transformed["records"], context.store_id
        )
        return {
            "fetched": transformed["fetched"],
            "inserted": inserted,
            "updated": updated,
            "skipped": transformed["skipped"] + loader_skipped,
            "errors": 0,
        }

    # ------------------------------------------------------------------ helpers
    def _parse_order(self, raw: dict, store_id: int) -> dict | None:
        """Map one raw API order onto fact_order columns; None when unparsable."""
        try:
            to_int = TypeParser.parse_int
            to_decimal = TypeParser.parse_decimal
            to_ts = TypeParser.parse_timestamp
            return {
                "store_id": store_id,
                "order_id": to_int(raw.get("orderId")),
                "order_no": raw.get("orderNo"),
                "member_id": to_int(raw.get("memberId")),
                "table_id": to_int(raw.get("tableId")),
                "order_time": to_ts(raw.get("orderTime"), self.tz),
                "end_time": to_ts(raw.get("endTime"), self.tz),
                "total_amount": to_decimal(raw.get("totalAmount")),
                "discount_amount": to_decimal(raw.get("discountAmount")),
                "final_amount": to_decimal(raw.get("finalAmount")),
                "pay_status": raw.get("payStatus"),
                "order_status": raw.get("orderStatus"),
                "remark": raw.get("remark"),
                "raw_data": json.dumps(raw, ensure_ascii=False),
            }
        except Exception as exc:
            self.logger.warning("解析订单失败: %s, 原始数据: %s", exc, raw)
            return None
|
||||
90
tasks/ods/packages_task.py
Normal file
90
tasks/ods/packages_task.py
Normal file
@@ -0,0 +1,90 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
"""团购/套餐定义任务"""
|
||||
|
||||
import json
|
||||
|
||||
from tasks.base_task import BaseTask, TaskContext
|
||||
from loaders.dimensions.package import PackageDefinitionLoader
|
||||
from models.parsers import TypeParser
|
||||
|
||||
|
||||
class PackagesDefTask(BaseTask):
    """Sync group-buy / package-coupon definitions into the package dimension."""

    def get_task_code(self) -> str:
        return "PACKAGES_DEF"

    def extract(self, context: TaskContext) -> dict:
        """Fetch every package-coupon definition for the store (no time window)."""
        params = self._merge_common_params({"siteId": context.store_id})
        records, _ = self.api.get_paginated(
            endpoint="/PackageCoupon/QueryPackageCouponList",
            params=params,
            page_size=self.config.get("api.page_size", 200),
            data_path=("data",),
            list_key="packageCouponList",
        )
        return {"records": records}

    def transform(self, extracted: dict, context: TaskContext) -> dict:
        """Map raw package JSON onto dimension columns; unusable rows are skipped."""
        parsed, skipped = [], 0
        for raw in extracted.get("records", []):
            mapped = self._parse_package(raw, context.store_id)
            if mapped:
                parsed.append(mapped)
            else:
                skipped += 1
        return {
            "records": parsed,
            "fetched": len(extracted.get("records", [])),
            "skipped": skipped,
        }

    def load(self, transformed: dict, context: TaskContext) -> dict:
        """Upsert parsed packages and report standard ETL counters."""
        loader = PackageDefinitionLoader(self.db)
        inserted, updated, loader_skipped = loader.upsert_packages(transformed["records"])
        return {
            "fetched": transformed["fetched"],
            "inserted": inserted,
            "updated": updated,
            "skipped": transformed["skipped"] + loader_skipped,
            "errors": 0,
        }

    def _parse_package(self, raw: dict, store_id: int) -> dict | None:
        """Parse one package definition; return None (and log) when unusable.

        The field mapping is guarded by try/except so a single malformed
        record is skipped rather than aborting the whole task run, matching
        the sibling ODS parsers (orders/payments/products).
        """
        package_id = TypeParser.parse_int(raw.get("id"))
        if not package_id:
            self.logger.warning("跳过缺少 package id 的套餐记录: %s", raw)
            return None

        try:
            return {
                "store_id": store_id,
                "package_id": package_id,
                # The API mixes snake_case and camelCase; accept both spellings.
                "package_code": raw.get("package_id") or raw.get("packageId"),
                "package_name": raw.get("package_name"),
                "table_area_id": raw.get("table_area_id"),
                "table_area_name": raw.get("table_area_name"),
                "selling_price": TypeParser.parse_decimal(
                    raw.get("selling_price") or raw.get("sellingPrice")
                ),
                "duration_seconds": TypeParser.parse_int(raw.get("duration")),
                "start_time": TypeParser.parse_timestamp(
                    raw.get("start_time") or raw.get("startTime"), self.tz
                ),
                "end_time": TypeParser.parse_timestamp(
                    raw.get("end_time") or raw.get("endTime"), self.tz
                ),
                "type": raw.get("type"),
                "is_enabled": raw.get("is_enabled"),
                "is_delete": raw.get("is_delete"),
                "usable_count": TypeParser.parse_int(raw.get("usable_count")),
                "creator_name": raw.get("creator_name"),
                "date_type": raw.get("date_type"),
                "group_type": raw.get("group_type"),
                "coupon_money": TypeParser.parse_decimal(
                    raw.get("coupon_money") or raw.get("couponMoney")
                ),
                "area_tag_type": raw.get("area_tag_type"),
                "system_group_type": raw.get("system_group_type"),
                "card_type_ids": raw.get("card_type_ids"),
                "raw_data": json.dumps(raw, ensure_ascii=False),
            }
        except Exception as exc:
            self.logger.warning("解析套餐记录失败: %s, 原始数据: %s", exc, raw)
            return None
|
||||
111
tasks/ods/payments_task.py
Normal file
111
tasks/ods/payments_task.py
Normal file
@@ -0,0 +1,111 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
"""支付记录ETL任务"""
|
||||
import json
|
||||
|
||||
from tasks.base_task import BaseTask, TaskContext
|
||||
from loaders.facts.payment import PaymentLoader
|
||||
from models.parsers import TypeParser
|
||||
|
||||
|
||||
class PaymentsTask(BaseTask):
    """E/T/L task for payment-log records (fact_payment)."""

    def get_task_code(self) -> str:
        return "PAYMENTS"

    # ------------------------------------------------------------------ E/T/L hooks
    def extract(self, context: TaskContext) -> dict:
        """Fetch raw pay-log records for the task window from the API."""
        window_params = {
            "siteId": context.store_id,
            "StartPayTime": TypeParser.format_timestamp(context.window_start, self.tz),
            "EndPayTime": TypeParser.format_timestamp(context.window_end, self.tz),
        }
        records, pages_meta = self.api.get_paginated(
            endpoint="/PayLog/GetPayLogListPage",
            params=self._merge_common_params(window_params),
            page_size=self.config.get("api.page_size", 200),
            data_path=("data",),
        )
        return {"records": records, "meta": pages_meta}

    def transform(self, extracted: dict, context: TaskContext) -> dict:
        """Parse raw payment JSON; rows that fail to parse count as skipped."""
        raw_records = extracted.get("records", [])
        parsed = []
        skipped = 0
        for rec in raw_records:
            row = self._parse_payment(rec, context.store_id)
            if row:
                parsed.append(row)
            else:
                skipped += 1
        return {
            "records": parsed,
            "fetched": len(raw_records),
            "skipped": skipped,
        }

    def load(self, transformed: dict, context: TaskContext) -> dict:
        """Upsert parsed rows into fact_payment and report standard counters."""
        inserted, updated, loader_skipped = PaymentLoader(self.db).upsert_payments(
            transformed["records"], context.store_id
        )
        return {
            "fetched": transformed["fetched"],
            "inserted": inserted,
            "updated": updated,
            "skipped": transformed["skipped"] + loader_skipped,
            "errors": 0,
        }

    # ------------------------------------------------------------------ helpers
    def _parse_payment(self, raw: dict, store_id: int) -> dict | None:
        """Map one raw pay-log entry onto fact_payment columns; None on failure.

        The upstream API mixes camelCase and snake_case keys, so most fields
        accept both spellings.
        """
        try:
            to_int = TypeParser.parse_int
            to_decimal = TypeParser.parse_decimal
            to_ts = TypeParser.parse_timestamp
            return {
                "store_id": store_id,
                "pay_id": to_int(raw.get("payId") or raw.get("id")),
                "order_id": to_int(raw.get("orderId")),
                "order_settle_id": to_int(
                    raw.get("orderSettleId") or raw.get("order_settle_id")
                ),
                "order_trade_no": to_int(
                    raw.get("orderTradeNo") or raw.get("order_trade_no")
                ),
                "relate_type": raw.get("relateType") or raw.get("relate_type"),
                "relate_id": to_int(raw.get("relateId") or raw.get("relate_id")),
                # Fall back to the task's store id when the record has no site id.
                "site_id": to_int(raw.get("siteId") or raw.get("site_id") or store_id),
                "tenant_id": to_int(raw.get("tenantId") or raw.get("tenant_id")),
                "pay_time": to_ts(raw.get("payTime"), self.tz),
                "create_time": to_ts(
                    raw.get("createTime") or raw.get("create_time"), self.tz
                ),
                "pay_amount": to_decimal(raw.get("payAmount")),
                "fee_amount": to_decimal(
                    raw.get("feeAmount")
                    or raw.get("serviceFee")
                    or raw.get("channelFee")
                    or raw.get("fee_amount")
                ),
                "discount_amount": to_decimal(
                    raw.get("discountAmount")
                    or raw.get("couponAmount")
                    or raw.get("discount_amount")
                ),
                "pay_type": raw.get("payType"),
                "payment_method": raw.get("paymentMethod") or raw.get("payment_method"),
                "online_pay_channel": raw.get("onlinePayChannel")
                or raw.get("online_pay_channel"),
                "pay_status": raw.get("payStatus"),
                "pay_terminal": raw.get("payTerminal") or raw.get("pay_terminal"),
                "remark": raw.get("remark"),
                "raw_data": json.dumps(raw, ensure_ascii=False),
            }
        except Exception as exc:
            self.logger.warning("解析支付记录失败: %s, 原始数据: %s", exc, raw)
            return None
|
||||
93
tasks/ods/products_task.py
Normal file
93
tasks/ods/products_task.py
Normal file
@@ -0,0 +1,93 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
"""商品档案(PRODUCTS)ETL任务"""
|
||||
|
||||
import json
|
||||
|
||||
from tasks.base_task import BaseTask, TaskContext
|
||||
from loaders.dimensions.product import ProductLoader
|
||||
from models.parsers import TypeParser
|
||||
|
||||
|
||||
class ProductsTask(BaseTask):
    """ETL task for the product dimension (store goods catalogue)."""

    def get_task_code(self) -> str:
        return "PRODUCTS"

    def extract(self, context: TaskContext) -> dict:
        """Fetch the full goods catalogue for the store (no time window)."""
        params = self._merge_common_params({"siteId": context.store_id})
        records, _ = self.api.get_paginated(
            endpoint="/TenantGoods/QueryTenantGoods",
            params=params,
            page_size=self.config.get("api.page_size", 200),
            data_path=("data",),
            list_key="tenantGoodsList",
        )
        return {"records": records}

    def transform(self, extracted: dict, context: TaskContext) -> dict:
        """Parse raw goods JSON; rows that fail to parse count as skipped."""
        raw_records = extracted.get("records", [])
        parsed = []
        skipped = 0
        for raw in raw_records:
            row = self._parse_product(raw, context.store_id)
            if row:
                parsed.append(row)
            else:
                skipped += 1
        return {
            "records": parsed,
            "fetched": len(raw_records),
            "skipped": skipped,
        }

    def load(self, transformed: dict, context: TaskContext) -> dict:
        """Upsert parsed products and report standard ETL counters."""
        inserted, updated, loader_skipped = ProductLoader(self.db).upsert_products(
            transformed["records"], context.store_id
        )
        return {
            "fetched": transformed["fetched"],
            "inserted": inserted,
            "updated": updated,
            "skipped": transformed["skipped"] + loader_skipped,
            "errors": 0,
        }

    def _parse_product(self, raw: dict, store_id: int) -> dict | None:
        """Map one raw goods record onto product-dimension columns.

        Returns None when no usable product id is present or parsing raises.
        """
        try:
            to_int = TypeParser.parse_int
            product_id = to_int(
                raw.get("siteGoodsId") or raw.get("tenantGoodsId") or raw.get("productId")
            )
            if not product_id:
                return None

            supplier_raw = raw.get("supplierId")
            combo_raw = raw.get("isCombo")
            return {
                "store_id": store_id,
                "product_id": product_id,
                "site_product_id": to_int(raw.get("siteGoodsId")),
                "product_name": raw.get("goodsName") or raw.get("productName"),
                "category_id": to_int(
                    raw.get("tenantGoodsCategoryId") or raw.get("goodsCategoryId")
                ),
                "category_name": raw.get("categoryName"),
                "second_category_id": to_int(raw.get("goodsCategorySecondId")),
                "unit": raw.get("goodsUnit"),
                "cost_price": TypeParser.parse_decimal(raw.get("costPrice")),
                "sale_price": TypeParser.parse_decimal(
                    raw.get("goodsPrice") or raw.get("salePrice")
                ),
                # Not provided by this endpoint; kept for schema compatibility.
                "allow_discount": None,
                "status": raw.get("goodsState") or raw.get("status"),
                "supplier_id": to_int(supplier_raw) if supplier_raw else None,
                "barcode": raw.get("barcode"),
                "is_combo": bool(combo_raw) if combo_raw is not None else None,
                "created_time": TypeParser.parse_timestamp(raw.get("createTime"), self.tz),
                "updated_time": TypeParser.parse_timestamp(raw.get("updateTime"), self.tz),
                "raw_data": json.dumps(raw, ensure_ascii=False),
            }
        except Exception as exc:
            self.logger.warning("解析商品记录失败: %s, 原始数据: %s", exc, raw)
            return None
|
||||
90
tasks/ods/refunds_task.py
Normal file
90
tasks/ods/refunds_task.py
Normal file
@@ -0,0 +1,90 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
"""退款记录任务"""
|
||||
|
||||
import json
|
||||
|
||||
from tasks.base_task import BaseTask, TaskContext
|
||||
from loaders.facts.refund import RefundLoader
|
||||
from models.parsers import TypeParser
|
||||
|
||||
|
||||
class RefundsTask(BaseTask):
    """Sync payment-refund records into the refund fact table."""

    def get_task_code(self) -> str:
        return "REFUNDS"

    def extract(self, context: TaskContext) -> dict:
        """Fetch refund pay-log records for the task window from the API."""
        params = self._merge_common_params(
            {
                "siteId": context.store_id,
                "startTime": TypeParser.format_timestamp(context.window_start, self.tz),
                "endTime": TypeParser.format_timestamp(context.window_end, self.tz),
            }
        )
        records, _ = self.api.get_paginated(
            endpoint="/Order/GetRefundPayLogList",
            params=params,
            page_size=self.config.get("api.page_size", 200),
            data_path=("data",),
        )
        return {"records": records}

    def transform(self, extracted: dict, context: TaskContext) -> dict:
        """Parse raw refund JSON; unusable rows are counted as skipped."""
        parsed, skipped = [], 0
        for raw in extracted.get("records", []):
            mapped = self._parse_refund(raw, context.store_id)
            if mapped:
                parsed.append(mapped)
            else:
                skipped += 1
        return {
            "records": parsed,
            "fetched": len(extracted.get("records", [])),
            "skipped": skipped,
        }

    def load(self, transformed: dict, context: TaskContext) -> dict:
        """Upsert parsed refunds and report standard ETL counters."""
        loader = RefundLoader(self.db)
        inserted, updated, loader_skipped = loader.upsert_refunds(transformed["records"])
        return {
            "fetched": transformed["fetched"],
            "inserted": inserted,
            "updated": updated,
            "skipped": transformed["skipped"] + loader_skipped,
            "errors": 0,
        }

    def _parse_refund(self, raw: dict, store_id: int) -> dict | None:
        """Parse one refund record; return None (and log) when unusable.

        The field mapping is guarded by try/except so a single malformed
        record is skipped rather than aborting the whole task run, matching
        the sibling ODS parsers (orders/payments/products).
        """
        refund_id = TypeParser.parse_int(raw.get("id"))
        if not refund_id:
            self.logger.warning("跳过缺少退款ID的数据: %s", raw)
            return None

        try:
            return {
                "store_id": store_id,
                "refund_id": refund_id,
                # The API mixes snake_case and camelCase; accept both spellings.
                "site_id": TypeParser.parse_int(raw.get("site_id") or raw.get("siteId")),
                "tenant_id": TypeParser.parse_int(raw.get("tenant_id") or raw.get("tenantId")),
                "pay_amount": TypeParser.parse_decimal(raw.get("pay_amount")),
                "pay_status": raw.get("pay_status"),
                "pay_time": TypeParser.parse_timestamp(
                    raw.get("pay_time") or raw.get("payTime"), self.tz
                ),
                "create_time": TypeParser.parse_timestamp(
                    raw.get("create_time") or raw.get("createTime"), self.tz
                ),
                "relate_type": raw.get("relate_type"),
                "relate_id": TypeParser.parse_int(raw.get("relate_id")),
                "payment_method": raw.get("payment_method"),
                "refund_amount": TypeParser.parse_decimal(raw.get("refund_amount")),
                "action_type": raw.get("action_type"),
                "pay_terminal": raw.get("pay_terminal"),
                "operator_id": TypeParser.parse_int(raw.get("operator_id")),
                "channel_pay_no": raw.get("channel_pay_no"),
                "channel_fee": TypeParser.parse_decimal(raw.get("channel_fee")),
                "is_delete": raw.get("is_delete"),
                "member_id": TypeParser.parse_int(raw.get("member_id")),
                "member_card_id": TypeParser.parse_int(raw.get("member_card_id")),
                "raw_data": json.dumps(raw, ensure_ascii=False),
            }
        except Exception as exc:
            self.logger.warning("解析退款记录失败: %s, 原始数据: %s", exc, raw)
            return None
|
||||
92
tasks/ods/table_discount_task.py
Normal file
92
tasks/ods/table_discount_task.py
Normal file
@@ -0,0 +1,92 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
"""台费折扣任务"""
|
||||
|
||||
import json
|
||||
|
||||
from tasks.base_task import BaseTask, TaskContext
|
||||
from loaders.facts.table_discount import TableDiscountLoader
|
||||
from models.parsers import TypeParser
|
||||
|
||||
|
||||
class TableDiscountTask(BaseTask):
    """Sync table-fee discount / price-adjustment records."""

    def get_task_code(self) -> str:
        return "TABLE_DISCOUNT"

    def extract(self, context: TaskContext) -> dict:
        """Fetch table-fee adjustment records for the task window from the API."""
        params = self._merge_common_params(
            {
                "siteId": context.store_id,
                "startTime": TypeParser.format_timestamp(context.window_start, self.tz),
                "endTime": TypeParser.format_timestamp(context.window_end, self.tz),
            }
        )
        records, _ = self.api.get_paginated(
            endpoint="/Site/GetTaiFeeAdjustList",
            params=params,
            page_size=self.config.get("api.page_size", 200),
            data_path=("data",),
            list_key="taiFeeAdjustInfos",
        )
        return {"records": records}

    def transform(self, extracted: dict, context: TaskContext) -> dict:
        """Parse raw adjustment JSON; unusable rows are counted as skipped."""
        parsed, skipped = [], 0
        for raw in extracted.get("records", []):
            mapped = self._parse_discount(raw, context.store_id)
            if mapped:
                parsed.append(mapped)
            else:
                skipped += 1
        return {
            "records": parsed,
            "fetched": len(extracted.get("records", [])),
            "skipped": skipped,
        }

    def load(self, transformed: dict, context: TaskContext) -> dict:
        """Upsert parsed discounts and report standard ETL counters."""
        loader = TableDiscountLoader(self.db)
        inserted, updated, loader_skipped = loader.upsert_discounts(transformed["records"])
        return {
            "fetched": transformed["fetched"],
            "inserted": inserted,
            "updated": updated,
            "skipped": transformed["skipped"] + loader_skipped,
            "errors": 0,
        }

    def _parse_discount(self, raw: dict, store_id: int) -> dict | None:
        """Parse one table-fee adjustment; return None (and log) when unusable.

        The field mapping is guarded by try/except so a single malformed
        record is skipped rather than aborting the whole task run, matching
        the sibling ODS parsers (orders/payments/products).
        """
        discount_id = TypeParser.parse_int(raw.get("id"))
        if not discount_id:
            self.logger.warning("跳过缺少折扣ID的记录: %s", raw)
            return None

        try:
            # Some table fields only exist on the nested tableProfile object.
            table_profile = raw.get("tableProfile") or {}
            return {
                "store_id": store_id,
                "discount_id": discount_id,
                "adjust_type": raw.get("adjust_type") or raw.get("adjustType"),
                "applicant_id": TypeParser.parse_int(raw.get("applicant_id")),
                "applicant_name": raw.get("applicant_name"),
                "operator_id": TypeParser.parse_int(raw.get("operator_id")),
                "operator_name": raw.get("operator_name"),
                "ledger_amount": TypeParser.parse_decimal(raw.get("ledger_amount")),
                "ledger_count": TypeParser.parse_int(raw.get("ledger_count")),
                "ledger_name": raw.get("ledger_name"),
                "ledger_status": raw.get("ledger_status"),
                "order_settle_id": TypeParser.parse_int(raw.get("order_settle_id")),
                "order_trade_no": TypeParser.parse_int(raw.get("order_trade_no")),
                "site_table_id": TypeParser.parse_int(
                    raw.get("site_table_id") or table_profile.get("id")
                ),
                "table_area_id": TypeParser.parse_int(
                    raw.get("tableAreaId") or table_profile.get("site_table_area_id")
                ),
                "table_area_name": table_profile.get("site_table_area_name"),
                "create_time": TypeParser.parse_timestamp(
                    raw.get("create_time") or raw.get("createTime"), self.tz
                ),
                "is_delete": raw.get("is_delete"),
                "raw_data": json.dumps(raw, ensure_ascii=False),
            }
        except Exception as exc:
            self.logger.warning("解析台费折扣记录失败: %s, 原始数据: %s", exc, raw)
            return None
|
||||
84
tasks/ods/tables_task.py
Normal file
84
tasks/ods/tables_task.py
Normal file
@@ -0,0 +1,84 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
"""台桌档案任务"""
|
||||
|
||||
import json
|
||||
|
||||
from tasks.base_task import BaseTask, TaskContext
|
||||
from loaders.dimensions.table import TableLoader
|
||||
from models.parsers import TypeParser
|
||||
|
||||
|
||||
class TablesTask(BaseTask):
    """Sync the store's table (billiard-table) master list."""

    def get_task_code(self) -> str:
        return "TABLES"

    def extract(self, context: TaskContext) -> dict:
        """Fetch all tables for the store (no time window)."""
        params = self._merge_common_params({"siteId": context.store_id})
        records, _ = self.api.get_paginated(
            endpoint="/Table/GetSiteTables",
            params=params,
            page_size=self.config.get("api.page_size", 200),
            data_path=("data",),
            list_key="siteTables",
        )
        return {"records": records}

    def transform(self, extracted: dict, context: TaskContext) -> dict:
        """Parse raw table JSON; unusable rows are counted as skipped."""
        parsed, skipped = [], 0
        for raw in extracted.get("records", []):
            mapped = self._parse_table(raw, context.store_id)
            if mapped:
                parsed.append(mapped)
            else:
                skipped += 1
        return {
            "records": parsed,
            "fetched": len(extracted.get("records", [])),
            "skipped": skipped,
        }

    def load(self, transformed: dict, context: TaskContext) -> dict:
        """Upsert parsed tables and report standard ETL counters."""
        loader = TableLoader(self.db)
        inserted, updated, loader_skipped = loader.upsert_tables(transformed["records"])
        return {
            "fetched": transformed["fetched"],
            "inserted": inserted,
            "updated": updated,
            "skipped": transformed["skipped"] + loader_skipped,
            "errors": 0,
        }

    def _parse_table(self, raw: dict, store_id: int) -> dict | None:
        """Parse one table record; return None (and log) when unusable.

        The field mapping is guarded by try/except so a single malformed
        record is skipped rather than aborting the whole task run, matching
        the sibling ODS parsers (orders/payments/products).
        """
        table_id = TypeParser.parse_int(raw.get("id"))
        if not table_id:
            self.logger.warning("跳过缺少 table_id 的台桌记录: %s", raw)
            return None

        try:
            return {
                "store_id": store_id,
                "table_id": table_id,
                # The API mixes snake_case and camelCase; accept both spellings.
                "site_id": TypeParser.parse_int(raw.get("site_id") or raw.get("siteId")),
                "area_id": TypeParser.parse_int(
                    raw.get("site_table_area_id") or raw.get("siteTableAreaId")
                ),
                "area_name": raw.get("areaName") or raw.get("site_table_area_name"),
                "table_name": raw.get("table_name") or raw.get("tableName"),
                "table_price": TypeParser.parse_decimal(
                    raw.get("table_price") or raw.get("tablePrice")
                ),
                "table_status": raw.get("table_status") or raw.get("tableStatus"),
                "table_status_name": raw.get("tableStatusName"),
                "light_status": raw.get("light_status"),
                "is_rest_area": raw.get("is_rest_area"),
                "show_status": raw.get("show_status"),
                "virtual_table": raw.get("virtual_table"),
                "charge_free": raw.get("charge_free"),
                "only_allow_groupon": raw.get("only_allow_groupon"),
                "is_online_reservation": raw.get("is_online_reservation"),
                "created_time": TypeParser.parse_timestamp(
                    raw.get("create_time") or raw.get("createTime"), self.tz
                ),
                "raw_data": json.dumps(raw, ensure_ascii=False),
            }
        except Exception as exc:
            self.logger.warning("解析台桌记录失败: %s, 原始数据: %s", exc, raw)
            return None
|
||||
102
tasks/ods/topups_task.py
Normal file
102
tasks/ods/topups_task.py
Normal file
@@ -0,0 +1,102 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
"""充值记录任务"""
|
||||
|
||||
import json
|
||||
|
||||
from tasks.base_task import BaseTask, TaskContext
|
||||
from loaders.facts.topup import TopupLoader
|
||||
from models.parsers import TypeParser
|
||||
|
||||
|
||||
class TopupsTask(BaseTask):
    """Sync stored-value recharge settlement records."""

    def get_task_code(self) -> str:
        return "TOPUPS"

    def extract(self, context: TaskContext) -> dict:
        """Fetch recharge settlement records for the task window from the API."""
        params = self._merge_common_params(
            {
                "siteId": context.store_id,
                "rangeStartTime": TypeParser.format_timestamp(context.window_start, self.tz),
                "rangeEndTime": TypeParser.format_timestamp(context.window_end, self.tz),
            }
        )
        records, _ = self.api.get_paginated(
            endpoint="/Site/GetRechargeSettleList",
            params=params,
            page_size=self.config.get("api.page_size", 200),
            data_path=("data",),
            list_key="settleList",
        )
        return {"records": records}

    def transform(self, extracted: dict, context: TaskContext) -> dict:
        """Parse raw recharge JSON; unusable rows are counted as skipped."""
        parsed, skipped = [], 0
        for raw in extracted.get("records", []):
            mapped = self._parse_topup(raw, context.store_id)
            if mapped:
                parsed.append(mapped)
            else:
                skipped += 1
        return {
            "records": parsed,
            "fetched": len(extracted.get("records", [])),
            "skipped": skipped,
        }

    def load(self, transformed: dict, context: TaskContext) -> dict:
        """Upsert parsed top-ups and report standard ETL counters."""
        loader = TopupLoader(self.db)
        inserted, updated, loader_skipped = loader.upsert_topups(transformed["records"])
        return {
            "fetched": transformed["fetched"],
            "inserted": inserted,
            "updated": updated,
            "skipped": transformed["skipped"] + loader_skipped,
            "errors": 0,
        }

    def _parse_topup(self, raw: dict, store_id: int) -> dict | None:
        """Parse one recharge settlement; return None (and log) when unusable.

        Some API responses nest the payload under a "settleList" dict; use
        that node when present, the record itself otherwise. The field
        mapping is guarded by try/except so a single malformed record is
        skipped rather than aborting the whole task run, matching the
        sibling ODS parsers (orders/payments/products).
        """
        settle_node = raw.get("settleList")
        node = settle_node if isinstance(settle_node, dict) else raw
        topup_id = TypeParser.parse_int(node.get("id"))
        if not topup_id:
            self.logger.warning("跳过缺少充值ID的记录: %s", raw)
            return None

        try:
            return {
                "store_id": store_id,
                "topup_id": topup_id,
                "member_id": TypeParser.parse_int(node.get("memberId")),
                "member_name": node.get("memberName"),
                "member_phone": node.get("memberPhone"),
                "card_id": TypeParser.parse_int(node.get("tenantMemberCardId")),
                "card_type_name": node.get("memberCardTypeName"),
                "pay_amount": TypeParser.parse_decimal(node.get("payAmount")),
                "consume_money": TypeParser.parse_decimal(node.get("consumeMoney")),
                "settle_status": node.get("settleStatus"),
                "settle_type": node.get("settleType"),
                "settle_name": node.get("settleName"),
                "settle_relate_id": TypeParser.parse_int(node.get("settleRelateId")),
                "pay_time": TypeParser.parse_timestamp(
                    node.get("payTime") or node.get("pay_time"), self.tz
                ),
                "create_time": TypeParser.parse_timestamp(
                    node.get("createTime") or node.get("create_time"), self.tz
                ),
                "operator_id": TypeParser.parse_int(node.get("operatorId")),
                "operator_name": node.get("operatorName"),
                "payment_method": node.get("paymentMethod"),
                "refund_amount": TypeParser.parse_decimal(node.get("refundAmount")),
                "cash_amount": TypeParser.parse_decimal(node.get("cashAmount")),
                "card_amount": TypeParser.parse_decimal(node.get("cardAmount")),
                "balance_amount": TypeParser.parse_decimal(node.get("balanceAmount")),
                "online_amount": TypeParser.parse_decimal(node.get("onlineAmount")),
                "rounding_amount": TypeParser.parse_decimal(node.get("roundingAmount")),
                "adjust_amount": TypeParser.parse_decimal(node.get("adjustAmount")),
                "goods_money": TypeParser.parse_decimal(node.get("goodsMoney")),
                "table_charge_money": TypeParser.parse_decimal(node.get("tableChargeMoney")),
                "service_money": TypeParser.parse_decimal(node.get("serviceMoney")),
                "coupon_amount": TypeParser.parse_decimal(node.get("couponAmount")),
                "order_remark": node.get("orderRemark"),
                # Archive the outer record, not just the nested node.
                "raw_data": json.dumps(raw, ensure_ascii=False),
            }
        except Exception as exc:
            self.logger.warning("解析充值记录失败: %s, 原始数据: %s", exc, raw)
            return None
|
||||
Reference in New Issue
Block a user