#!/usr/bin/env python3
# -*- coding: utf-8 -*-

import re
import glob


def clean_duplicate_keys(file_path):
    """Remove duplicate keys from a .strings file, keeping the last definition of each key."""
    print(f"Cleaning duplicates in {file_path}...")

    # Read the file contents.
    with open(file_path, 'r', encoding='utf-8') as f:
        lines = f.readlines()

    # Parse each line, recording any "key" = "value"; pair it contains.
    line_info = []

    for i, line in enumerate(lines):
        line = line.strip()
        if not line or line.startswith('//'):
            line_info.append((i, line, None, None))
            continue

        match = re.match(r'^"([^"]+)"\s*=\s*"([^"]*)"\s*;', line)
        if match:
            key, value = match.group(1), match.group(2)
            line_info.append((i, line, key, value))
        else:
            line_info.append((i, line, None, None))

    # Rebuild the file contents without duplicate keys. Iterating from the end
    # ensures the last definition of each key is the one that survives.
    seen_keys = set()
    cleaned_lines = []

    for i, line, key, value in reversed(line_info):
        if key is None:
            cleaned_lines.insert(0, line)
        elif key not in seen_keys:
            seen_keys.add(key)
            cleaned_lines.insert(0, line)
        else:
            print(f" Removed duplicate key: {key}")

    # Write the cleaned contents back to the file.
    with open(file_path, 'w', encoding='utf-8') as f:
        for line in cleaned_lines:
            f.write(line + '\n')

    print(f" Cleaned {file_path}")


def main():
    """Clean every Localizable.strings file in the project."""
    print("Cleaning duplicate keys in localization files...")

    # Process all localization files.
    lproj_files = glob.glob('MyQrCode/*.lproj/Localizable.strings')

    for file_path in lproj_files:
        clean_duplicate_keys(file_path)

    print("Done!")


if __name__ == "__main__":
    main()
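
# A minimal sketch of the input this script expects (the key names below are
# hypothetical, not taken from the project): a .strings file where each entry
# has the form "key" = "value"; and lines starting with // are comments.
# Given, for example:
#
#   "app_name" = "My QR Code";
#   "app_name" = "MyQrCode";
#
# only the second line is kept, since the last definition of each key wins.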