Incremental update: record full row payloads in the DB fake and move the API fakes to a shared iter_paginated/get_paginated contract
@@ -135,16 +135,41 @@ class FakeDBOperations:
     def __init__(self):
         self.upserts: List[Dict] = []
         self.executes: List[Dict] = []
         self.commits = 0
         self.rollbacks = 0
         self.conn = FakeConnection()
 
     def batch_upsert_with_returning(self, sql: str, rows: List[Dict], page_size: int = 1000):
-        self.upserts.append({"sql": sql.strip(), "count": len(rows), "page_size": page_size})
+        self.upserts.append(
+            {
+                "sql": sql.strip(),
+                "count": len(rows),
+                "page_size": page_size,
+                "rows": [dict(row) for row in rows],
+            }
+        )
         return len(rows), 0
 
     def batch_execute(self, sql: str, rows: List[Dict], page_size: int = 1000):
-        self.upserts.append({"sql": sql.strip(), "count": len(rows), "page_size": page_size})
+        self.executes.append(
+            {
+                "sql": sql.strip(),
+                "count": len(rows),
+                "page_size": page_size,
+                "rows": [dict(row) for row in rows],
+            }
+        )
 
     def execute(self, sql: str, params=None):
         self.executes.append({"sql": sql.strip(), "params": params})
 
     def query(self, sql: str, params=None):
         self.executes.append({"sql": sql.strip(), "params": params, "type": "query"})
         return []
 
     def cursor(self):
         return self.conn.cursor()
 
     def commit(self):
         self.commits += 1
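
Note: with the "rows" snapshot added above, tests can assert on the exact payload a task handed to the database, not just row counts. A minimal usage sketch, assuming FakeDBOperations is instantiated directly in a test (the SQL text and rows are illustrative, not from this commit):

    fake = FakeDBOperations()
    fake.batch_upsert_with_returning(
        "INSERT INTO t (id) VALUES (%(id)s)",  # illustrative SQL
        [{"id": 1}, {"id": 2}],
    )
    call = fake.upserts[0]
    assert call["count"] == 2
    assert call["rows"] == [{"id": 1}, {"id": 2}]  # snapshot taken via dict(row)

Because each row is shallow-copied with dict(row), later mutation of the caller's row objects no longer changes what the fake recorded.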
@@ -161,22 +186,53 @@ class FakeAPIClient:
         self.calls: List[Dict] = []
 
     # pylint: disable=unused-argument
-    def get_paginated(self, endpoint: str, params=None, **kwargs):
+    def iter_paginated(
+        self,
+        endpoint: str,
+        params=None,
+        page_size: int = 200,
+        page_field: str = "pageIndex",
+        size_field: str = "pageSize",
+        data_path: Tuple[str, ...] = (),
+        list_key: str | None = None,
+    ):
         self.calls.append({"endpoint": endpoint, "params": params})
         if endpoint not in self.data_map:
             raise AssertionError(f"Missing fixture for endpoint {endpoint}")
-        return list(self.data_map[endpoint]), [{"page": 1, "size": len(self.data_map[endpoint])}]
+        records = list(self.data_map[endpoint])
+        yield 1, records, dict(params or {}), {"data": records}
+
+    def get_paginated(self, endpoint: str, params=None, **kwargs):
+        records = []
+        pages = []
+        for page_no, page_records, req, resp in self.iter_paginated(endpoint, params, **kwargs):
+            records.extend(page_records)
+            pages.append({"page": page_no, "request": req, "response": resp})
+        return records, pages
+
+    def get_source_hint(self, endpoint: str) -> str | None:
+        return None
 
 
 class OfflineAPIClient:
-    """API client dedicated to offline mode: reads the archived JSON for an endpoint, applies data_path, and replays the list data."""
+    """API client dedicated to offline mode: reads the archived JSON for an endpoint, drills into data_path, and replays the list data."""
 
     def __init__(self, file_map: Dict[str, Path]):
         self.file_map = {k: Path(v) for k, v in file_map.items()}
         self.calls: List[Dict] = []
 
     # pylint: disable=unused-argument
-    def get_paginated(self, endpoint: str, params=None, page_size: int = 200, data_path: Tuple[str, ...] = (), **kwargs):
+    def iter_paginated(
+        self,
+        endpoint: str,
+        params=None,
+        page_size: int = 200,
+        page_field: str = "pageIndex",
+        size_field: str = "pageSize",
+        data_path: Tuple[str, ...] = (),
+        list_key: str | None = None,
+    ):
         self.calls.append({"endpoint": endpoint, "params": params})
         if endpoint not in self.file_map:
             raise AssertionError(f"Missing archive for endpoint {endpoint}")
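
Note: both clients now share a two-layer contract: iter_paginated is a generator yielding (page_no, records, request_params, raw_response) tuples, and get_paginated simply drains it into a (records, pages) pair. A sketch of a consumer, assuming FakeAPIClient's constructor (not shown in this hunk) accepts the data_map of fixtures:

    # assumed constructor signature; only data_map usage is visible in the diff
    client = FakeAPIClient(data_map={"/users": [{"id": 1}, {"id": 2}]})
    for page_no, records, req, resp in client.iter_paginated("/users"):
        assert page_no == 1
        assert resp == {"data": records}
    all_records, pages = client.get_paginated("/users")
    assert len(all_records) == 2 and pages[0]["page"] == 1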
@@ -188,17 +244,42 @@ class OfflineAPIClient:
         for key in data_path:
             if isinstance(data, dict):
                 data = data.get(key, [])
             else:
                 data = []
                 break
 
         if list_key and isinstance(data, dict):
             data = data.get(list_key, [])
 
         if not isinstance(data, list):
             data = []
 
-        return data, [{"page": 1, "mode": "offline"}]
+        total = len(data)
+        start = 0
+        page = 1
+        while start < total or (start == 0 and total == 0):
+            chunk = data[start : start + page_size]
+            if not chunk and total != 0:
+                break
+            yield page, list(chunk), dict(params or {}), payload
+            if len(chunk) < page_size:
+                break
+            start += page_size
+            page += 1
+
+    def get_paginated(self, endpoint: str, params=None, **kwargs):
+        records = []
+        pages = []
+        for page_no, page_records, req, resp in self.iter_paginated(endpoint, params, **kwargs):
+            records.extend(page_records)
+            pages.append({"page": page_no, "request": req, "response": resp})
+        return records, pages
+
+    def get_source_hint(self, endpoint: str) -> str | None:
+        if endpoint not in self.file_map:
+            return None
+        return str(self.file_map[endpoint])
 
 
 class RealDBOperationsAdapter:
 
     """Adapter backed by a real PostgreSQL connection, providing batch_upsert plus transaction support for tasks."""
 
     def __init__(self, dsn: str):
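
Note: the offline replay loop is written so an empty archive still yields exactly one empty page (the start == 0 and total == 0 clause), which keeps page accounting uniform downstream. A standalone walk-through of the same chunking logic, assuming an archive that resolved to 450 records with page_size=200:

    data = [{"id": i} for i in range(450)]  # stand-in for the archived list
    page_size, start, sizes = 200, 0, []
    total = len(data)
    while start < total or (start == 0 and total == 0):
        chunk = data[start : start + page_size]
        sizes.append(len(chunk))
        if len(chunk) < page_size:  # short page means the data is exhausted
            break
        start += page_size
    assert sizes == [200, 200, 50]  # pages 1-3, mirroring what the client yields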
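
Note: the adapter body is truncated in this view. For orientation only, a hypothetical psycopg2-backed implementation of the same interface might look like the following; psycopg2, the class name, and the "VALUES %s" convention are assumptions, not code from this commit:

    import psycopg2
    from psycopg2.extras import execute_values

    class SketchPGAdapter:
        """Hypothetical stand-in, not the repo's RealDBOperationsAdapter."""

        def __init__(self, dsn: str):
            self.conn = psycopg2.connect(dsn)

        def batch_upsert_with_returning(self, sql: str, rows, page_size: int = 1000):
            # sql is assumed to contain a single "VALUES %s" slot; dict rows would
            # also need a template such as "(%(id)s, %(name)s)" for execute_values.
            with self.conn.cursor() as cur:
                returned = execute_values(cur, sql, rows, page_size=page_size, fetch=True)
            return len(returned), 0

        def commit(self):
            self.conn.commit()

        def rollback(self):
            self.conn.rollback()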