feat: initial version
tools/generate_sandboxvars_zh.py (Normal file, +158)
@@ -0,0 +1,158 @@
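# Generates a JSON file that maps SandboxVars setting paths (e.g. "ZombieLore.Speed")
# to Chinese descriptions, by extracting the English comment blocks from
# server_SandboxVars.lua and machine-translating them.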
from __future__ import annotations

import argparse
import json
import time
import urllib.parse
import urllib.request
from urllib.error import URLError
from pathlib import Path


def _decode_utf8_keep_newlines(path: Path) -> str:
    return path.read_bytes().decode("utf-8-sig")


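# Best-effort machine translation via the unofficial translate.googleapis.com
# "gtx" endpoint (no API key). The response is a nested JSON array: payload[0]
# is a list of segments whose first element is the translated text. Failures
# are retried with a simple linear backoff (backoff_s * attempt number).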
def translate_en_to_zh(
    text: str,
    *,
    timeout_s: int = 45,
    retries: int = 6,
    backoff_s: float = 1.0,
) -> str:
    """
    Uses Google Translate's public endpoint (no API key) to translate English -> zh-CN.

    Note: This is a best-effort helper for generating a local offline mapping file.
    """
    if not text.strip():
        return ""

    q = urllib.parse.quote(text)
    url = (
        "https://translate.googleapis.com/translate_a/single"
        "?client=gtx&sl=en&tl=zh-CN&dt=t&q="
        + q
    )
    headers = {"User-Agent": "pz-config-editor/1.0 (+https://translate.googleapis.com)"}
    req = urllib.request.Request(url, headers=headers)

    last_err: Exception | None = None
    for attempt in range(retries):
        try:
            with urllib.request.urlopen(req, timeout=timeout_s) as resp:
                data = resp.read().decode("utf-8")
            payload = json.loads(data)
            parts = payload[0] or []
            return "".join((p[0] or "") for p in parts)
        except (TimeoutError, URLError, json.JSONDecodeError) as e:
            last_err = e
            time.sleep(backoff_s * (attempt + 1))

    raise RuntimeError(f"Translate failed after {retries} retries: {last_err}") from last_err


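# Scans the Lua file line by line: consecutive "--" comment lines are buffered,
# "Name = {" table openers push onto a stack (a leading "SandboxVars" table is
# dropped from key paths), and when a "key = value," line follows, the buffered
# comment block is attached to the dotted setting path (e.g. "ZombieLore.Speed").
# Any other line clears the buffer.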
def extract_sandbox_comment_blocks(lua_path: Path) -> dict[str, str]:
    """
    Returns: { "Zombies": "comment lines...", "ZombieLore.Speed": "comment lines..." }
    """
    lines = _decode_utf8_keep_newlines(lua_path).splitlines()

    def is_ident(s: str) -> bool:
        return s.isidentifier()

    comments: list[str] = []
    table_stack: list[str] = []
    out: dict[str, str] = {}

    for line in lines:
        stripped = line.strip()
        if stripped.startswith("--"):
            comments.append(stripped[2:].lstrip())
            continue

        if not stripped:
            comments = []
            continue

        if stripped.endswith("= {"):
            name = stripped.split("=", 1)[0].strip()
            if is_ident(name):
                table_stack.append(name)
            comments = []
            continue

        if stripped == "},":
            if table_stack:
                table_stack.pop()
            comments = []
            continue

        if "=" in stripped and stripped.endswith(","):
            key = stripped.split("=", 1)[0].strip()
            if not is_ident(key):
                comments = []
                continue

            effective_stack = table_stack[:]
            if effective_stack[:1] == ["SandboxVars"]:
                effective_stack = effective_stack[1:]
            setting_path = ".".join(effective_stack + [key]) if effective_stack else key
            out[setting_path] = "\n".join(comments).strip()
            comments = []
            continue

        comments = []

    return out


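# Typical invocation, e.g. (assuming it is run from the repository root):
#   python tools/generate_sandboxvars_zh.py --lua server_SandboxVars.lua \
#       --out i18n/sandboxvars_zh.json --resume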
def main() -> int:
    parser = argparse.ArgumentParser(description="Generate SandboxVars English->Chinese comment mapping JSON.")
    parser.add_argument("--lua", default="server_SandboxVars.lua", help="Path to server_SandboxVars.lua")
    parser.add_argument("--out", default=str(Path("i18n") / "sandboxvars_zh.json"), help="Output JSON path")
    parser.add_argument("--sleep", type=float, default=0.12, help="Sleep between requests (seconds)")
    parser.add_argument("--resume", action="store_true", help="If output exists, load and continue")
    args = parser.parse_args()

    lua_path = Path(args.lua)
    out_path = Path(args.out)
    out_path.parent.mkdir(parents=True, exist_ok=True)

    blocks = extract_sandbox_comment_blocks(lua_path)

    mapping: dict[str, str] = {
        "VERSION": "SandboxVars 文件版本号(通常不用改)。",
        "StartYear": "开局年份(通常与开局日期/月份一起使用)。",
    }
    if args.resume and out_path.exists():
        try:
            mapping.update(json.loads(out_path.read_text(encoding="utf-8")))
        except Exception:
            pass

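    # Identical English comment blocks are translated once and reused via `cache`;
    # a short sleep between requests (--sleep) keeps the request rate low.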
    cache: dict[str, str] = {}
    total = 0
    for key, en in blocks.items():
        if key in mapping:
            continue
        total += 1
        if en in cache:
            mapping[key] = cache[en]
            continue
        try:
            zh = translate_en_to_zh(en)
        except Exception as e:
            print(f"[WARN] {key}: {e}")
            zh = ""
        cache[en] = zh
        mapping[key] = zh
        time.sleep(args.sleep)

    out_path.write_text(json.dumps(mapping, ensure_ascii=False, indent=2), encoding="utf-8")
    print(f"Wrote {len(mapping)} entries to {out_path} (translated {total} blocks).")
    return 0


if __name__ == "__main__":
    raise SystemExit(main())