微信小程序页面迁移校验之前 P5任务处理之前

This commit is contained in:
Neo
2026-03-09 01:19:21 +08:00
parent 263bf96035
commit 6e20987d2f
1112 changed files with 153824 additions and 219694 deletions

View File

@@ -0,0 +1,124 @@
"""
从 Kiro Agent 缓存中恢复 docs/h5_ui/ 文件到 2026-03-04 09:00 (北京时间) 之前的最新版本。
扫描缓存目录中所有 hash 子目录,找到每个 h5_ui 文件的最新缓存版本,
与当前工作区版本对比,仅恢复有差异的文件。
"""
import os
import shutil
import hashlib
from pathlib import Path
from datetime import datetime, timezone, timedelta
# 北京时间 = UTC+8
BJT = timezone(timedelta(hours=8))
CUTOFF = datetime(2026, 3, 4, 9, 0, 0, tzinfo=BJT)
CACHE_ROOT = Path(
r"C:\Users\Administrator\AppData\Roaming\Kiro\User\globalStorage"
r"\kiro.kiroagent\f2aaff8dac256b544f00059b8ef5d1e2"
r"\74a08cf8613c7dec4db7b264470db812"
)
WORKSPACE = Path(r"C:\NeoZQYY")
TARGET_PREFIX = Path("docs") / "h5_ui"
def md5(path: Path) -> str:
    """Return the hex MD5 digest of the contents of *path*.

    Reads the file in fixed-size chunks so arbitrarily large files can be
    hashed with bounded memory (the previous read_bytes() call buffered the
    whole file at once). The digest is identical to hashing the full bytes.
    """
    h = hashlib.md5()
    with path.open("rb") as f:
        # 1 MiB chunks: big enough to keep per-call overhead negligible.
        while chunk := f.read(1 << 20):
            h.update(chunk)
    return h.hexdigest()
def scan_cache():
    """Scan the cache and return {relative_path: [(mtime_bjt, full_path), ...]}.

    Walks every hash-snapshot directory under CACHE_ROOT and collects all
    regular files beneath its docs/h5_ui subtree. Modification times are
    returned as timezone-aware datetimes in Beijing time (BJT).
    """
    found: dict[str, list[tuple[datetime, Path]]] = {}
    if not CACHE_ROOT.exists():
        print(f"[ERROR] 缓存根目录不存在: {CACHE_ROOT}")
        return found
    snapshots = (d for d in CACHE_ROOT.iterdir() if d.is_dir())
    for snapshot in snapshots:
        subtree = snapshot / "docs" / "h5_ui"
        if not subtree.exists():
            continue
        for entry in subtree.rglob("*"):
            if not entry.is_file():
                continue
            # Key by path relative to the snapshot root,
            # e.g. docs/h5_ui/pages/xxx.html
            key = str(entry.relative_to(snapshot))
            stamp = datetime.fromtimestamp(entry.stat().st_mtime, tz=BJT)
            found.setdefault(key, []).append((stamp, entry))
    return found
def find_best_versions(cache_map, cutoff=None):
    """Pick, for each file, the newest cached version at or before *cutoff*.

    Args:
        cache_map: {relative_path: [(mtime, path), ...]} as produced by
            scan_cache(). Each version list is sorted in place (descending
            by mtime) as a side effect.
        cutoff: aware datetime bound; defaults to the module-level CUTOFF.

    Returns:
        {relative_path: (mtime, path)} — the newest version with
        mtime <= cutoff, or, when *every* version postdates the cutoff,
        the earliest version available as a fallback.

    Note: the original docstring claimed the fallback fires "when all
    versions are earlier than the cutoff", but that case is already covered
    by the normal rule — the fallback actually triggers when all versions
    are *after* the cutoff, matching the inline comment.
    """
    if cutoff is None:
        cutoff = CUTOFF
    best = {}
    for rel, versions in cache_map.items():
        # Newest first.
        versions.sort(key=lambda v: v[0], reverse=True)
        # Prefer the newest version not later than the cutoff.
        chosen = next((v for v in versions if v[0] <= cutoff), None)
        if chosen is None:
            # All versions postdate the cutoff (unlikely): take the earliest,
            # i.e. the last element after the descending sort.
            chosen = versions[-1]
        best[rel] = chosen
    return best
def main():
    """Entry point: scan the cache, pick best versions, restore differing files.

    Copies cached files into WORKSPACE when they are missing ([NEW]) or differ
    by MD5 ([UPDATED]); files with identical contents are skipped ([SAME]).
    Prints a summary report at the end.
    """
    print(f"[scan] 缓存根: {CACHE_ROOT}")
    print(f"[scan] 截止时间: {CUTOFF.isoformat()}")
    cache_map = scan_cache()
    if not cache_map:
        print("[ERROR] 未找到任何 h5_ui 缓存文件")
        return
    print(f"[scan] 找到 {len(cache_map)} 个文件的缓存版本")
    best = find_best_versions(cache_map)
    restored = []
    skipped_same = []
    # NOTE(review): skipped_missing is never appended to or reported below —
    # dead variable, candidate for removal.
    skipped_missing = []
    for rel, (mtime, cached_path) in sorted(best.items()):
        workspace_path = WORKSPACE / rel
        if not workspace_path.exists():
            # File absent from the workspace: copy it straight in.
            workspace_path.parent.mkdir(parents=True, exist_ok=True)
            shutil.copy2(cached_path, workspace_path)
            restored.append((rel, mtime, "NEW"))
            continue
        # Compare contents; identical files need no action.
        if md5(cached_path) == md5(workspace_path):
            skipped_same.append(rel)
            continue
        # Contents differ: overwrite the workspace copy with the cached version.
        shutil.copy2(cached_path, workspace_path)
        restored.append((rel, mtime, "UPDATED"))
    print(f"\n{'='*60}")
    print(f"恢复结果摘要")
    print(f"{'='*60}")
    if restored:
        print(f"\n已恢复 {len(restored)} 个文件:")
        for rel, mtime, action in restored:
            print(f"  [{action}] {rel} (缓存时间: {mtime.strftime('%Y-%m-%d %H:%M:%S')})")
    else:
        print("\n没有需要恢复的文件(缓存版本与当前工作区一致)")
    if skipped_same:
        print(f"\n跳过 {len(skipped_same)} 个文件(内容与当前一致):")
        for rel in skipped_same:
            print(f"  [SAME] {rel}")
    print(f"\n总计: 缓存文件 {len(cache_map)}, 恢复 {len(restored)}, 跳过 {len(skipped_same)}")
if __name__ == "__main__":
    main()