Automated i18n checks and issue management (#55)
* Some i18n checks that should make it into automated builds one day
* Add PR checks and auto i18n issue management :crossed_fingers:
Showing 9 changed files with 350 additions and 5 deletions.
@@ -0,0 +1,31 @@
name: Translation Issue Updates

on:
  push:
    branches:
      - main

jobs:
  check_translations:
    runs-on: ubuntu-latest

    steps:
      - name: Checkout code
        uses: actions/checkout@v3

      - name: Set up Python
        uses: actions/setup-python@v4
        with:
          python-version: '3.x'

      - name: Install Python dependencies
        run: pip install -r requirements.txt

      - name: Run translation check script
        run: python .scripts/missing_translation_check.py

      - name: Create or Update Issues
        run: python .scripts/create_or_update_i18n_issues.py
        env:
          GITHUB_TOKEN: ${{ secrets.GH_PAT }}
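The two script steps in this workflow hand data to each other through the filesystem rather than through job outputs: the check script writes one markdown report per locale into `.scripts/.output`, and the issue step reads that directory back. A minimal sketch of that handoff — the locale names below are hypothetical; the real ones come from `locale/locales.xml`:

```python
import os

# Hypothetical reports, shaped like the ones missing_translation_check.py writes.
reports = {
    "frFR.lua_missing_keys.md": "# Missing Translations for frFR.lua\n| Missing Key | enUS Value |\n...",
    "deDE.lua_missing_keys.md": "# Missing Translations for deDE.lua\n| Missing Key | enUS Value |\n...",
}

output_dir = ".scripts/.output"
os.makedirs(output_dir, exist_ok=True)

for name, body in reports.items():
    # The next step, create_or_update_i18n_issues.py, lists this directory
    # and files (or updates) one GitHub issue per report it finds.
    with open(os.path.join(output_dir, name), "w") as fh:
        fh.write(body)
```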
@@ -0,0 +1,28 @@
name: PR Translation Check

on:
  pull_request:
    branches:
      - main

jobs:
  check_translations:
    runs-on: ubuntu-latest

    steps:
      - name: Checkout code
        uses: actions/checkout@v3

      - name: Set up Python
        uses: actions/setup-python@v4
        with:
          python-version: '3.x'

      - name: Install Python dependencies
        run: pip install -r requirements.txt

      - name: Run translation check script
        run: python .scripts/missing_translation_check.py

      - name: Check for Hard-coded strings
        run: python .scripts/hardcode_string_check.py
@@ -42,6 +42,10 @@ luac.out
.release/*
!.release/local.sh

.scripts/.output

.venv

/luarocks
/lua
/lua_modules
@@ -0,0 +1,84 @@ | ||
import os | ||
import requests | ||
import urllib.parse | ||
|
||
# Set up necessary constants | ||
GITHUB_API_URL = "https://api.github.com" | ||
REPO_OWNER = "Mctalian" | ||
REPO_NAME = "RPGLootFeed" | ||
GITHUB_TOKEN = os.getenv("GITHUB_TOKEN") | ||
|
||
# GitHub API headers | ||
headers = { | ||
"Authorization": f"Bearer {GITHUB_TOKEN}", | ||
"Accept": "application/vnd.github.v3+json", | ||
} | ||
|
||
def get_all_translation_issues(): | ||
"""Search for an existing issue for the given locale.""" | ||
search_url = f"{GITHUB_API_URL}/search/issues" | ||
query = f"repo:{REPO_OWNER}/{REPO_NAME} is:issue label:i18n label:\"help wanted\" state:open" | ||
params = {"q": query} | ||
|
||
response = requests.get(search_url, headers=headers, params=params) | ||
response.raise_for_status() | ||
issues = response.json().get("items", []) | ||
|
||
# Create a dictionary with locale as the key and the issue as the value | ||
issues_dict = {} | ||
for issue in issues: | ||
title = issue["title"] | ||
# Strip the 'i18n: ' prefix and ' Translations' suffix to extract the locale | ||
if title.startswith("i18n: ") and title.endswith(" Translations"): | ||
locale = title[len("i18n: "):-len(" Translations")] | ||
issues_dict[locale] = issue | ||
|
||
return issues_dict | ||
|
||
def create_issue(locale, markdown_content): | ||
"""Create a new GitHub issue with the given locale and content.""" | ||
issue_url = f"{GITHUB_API_URL}/repos/{REPO_OWNER}/{REPO_NAME}/issues" | ||
title = f"i18n: {locale} Translations" | ||
issue_data = { | ||
"title": title, | ||
"body": markdown_content, | ||
"labels": ["i18n", "help wanted"], # Add or modify labels as needed | ||
} | ||
|
||
response = requests.post(issue_url, headers=headers, json=issue_data) | ||
response.raise_for_status() | ||
print(f"Issue created: {response.json().get('html_url')}") | ||
|
||
def update_issue(issue_number, markdown_content): | ||
"""Update an existing GitHub issue with the new markdown content.""" | ||
issue_url = f"{GITHUB_API_URL}/repos/{REPO_OWNER}/{REPO_NAME}/issues/{issue_number}" | ||
issue_data = { | ||
"body": markdown_content, | ||
} | ||
|
||
response = requests.patch(issue_url, headers=headers, json=issue_data) | ||
response.raise_for_status() | ||
print(f"Issue updated: {response.json().get('html_url')}") | ||
|
||
def process_markdown_files(output_directory): | ||
"""Process each markdown file and create or update the corresponding GitHub issue.""" | ||
issues_dict = get_all_translation_issues() | ||
|
||
for filename in os.listdir(output_directory): | ||
if filename.endswith("_missing_keys.md"): | ||
locale = filename.split(".")[0] | ||
with open(os.path.join(output_directory, filename), "r") as file: | ||
markdown_content = file.read() | ||
|
||
# Check for an existing issue | ||
existing_issue = issues_dict.get(locale) | ||
if existing_issue: | ||
# Update the existing issue | ||
update_issue(existing_issue["number"], markdown_content) | ||
else: | ||
# Create a new issue | ||
create_issue(locale, markdown_content) | ||
|
||
if __name__ == "__main__": | ||
output_directory = ".scripts/.output" | ||
process_markdown_files(output_directory) |
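The issue titles are the only link between the search query and the report files, so the naming has to round-trip cleanly. A quick sketch of that round trip, using a hypothetical frFR report filename:

```python
# Hypothetical filename, shaped like the ones missing_translation_check.py writes.
filename = "frFR.lua_missing_keys.md"

# process_markdown_files() derives the locale from the filename...
locale = filename.split(".")[0]                     # -> "frFR"

# ...create_issue() embeds it in the issue title...
title = f"i18n: {locale} Translations"              # -> "i18n: frFR Translations"

# ...and get_all_translation_issues() recovers it on the next run.
assert title[len("i18n: "):-len(" Translations")] == locale
```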
@@ -0,0 +1,71 @@ | ||
import os | ||
import re | ||
|
||
# Function to check if a file or directory should be ignored | ||
def should_ignore(path, ignore_files, ignore_dirs): | ||
for dir in ignore_dirs: | ||
if path.startswith(f"./{dir}/"): | ||
return True | ||
return os.path.basename(path) in ignore_files | ||
|
||
# Function to scan for hard-coded strings | ||
def check_hardcoded_strings(file_content, filename): | ||
issues = [] | ||
|
||
# Check for Print(...) calls with hard-coded strings | ||
print_matches = re.findall(r'Print\(\s*"([^"]+)"\s*\)', file_content) | ||
for match in print_matches: | ||
issues.append(f'Hard-coded string in Print(...) in {filename}: "{match}"') | ||
|
||
# Check for config options with hard-coded name or desc fields | ||
config_matches = re.findall(r'\b(name|desc)\s*=\s*"([^"]+)"', file_content) | ||
for field, value in config_matches: | ||
issues.append(f'Hard-coded {field} in {filename}: "{value}"') | ||
|
||
# Check for config options with hard-coded values in key-value pairs within "values" tables | ||
values_matches = re.findall(r'\bvalues\s*=\s*{([^}]*)}', file_content, re.DOTALL) | ||
for match in values_matches: | ||
key_value_matches = re.findall(r'\[?"?(.*)"?\]?\s*=\s*"([^"]+)"', match) | ||
for key, value in key_value_matches: | ||
issues.append(f'Hard-coded key-value pair in "values" table in {filename}: "{key.strip()} = {value.strip()}"') | ||
|
||
return issues | ||
|
||
|
||
# Function to recursively scan directories for .lua files | ||
def scan_directory(directory, ignore_files=[], ignore_dirs=[]): | ||
all_issues = [] | ||
|
||
for root, dirs, files in os.walk(directory): | ||
# Modify dirs in place to remove ignored directories from the scan | ||
dirs[:] = [d for d in dirs if not should_ignore(os.path.join(root, d), [], ignore_dirs)] | ||
|
||
for file in files: | ||
if file.endswith(".lua") and not should_ignore(os.path.join(root, file), ignore_files, []): | ||
filepath = os.path.join(root, file) | ||
with open(filepath, 'r', encoding='utf-8') as f: | ||
content = f.read() | ||
issues = check_hardcoded_strings(content, filepath) | ||
if issues: | ||
all_issues.extend(issues) | ||
|
||
return all_issues | ||
|
||
def main(): | ||
ignore_files = ['TestMode.lua'] | ||
ignore_dirs = ['.git', '.scripts', '.release', 'locale'] | ||
|
||
# Scan the current directory | ||
issues = scan_directory('.', ignore_files, ignore_dirs) | ||
|
||
# Output any issues found | ||
if issues: | ||
print("Hard-coded strings found:") | ||
for issue in issues: | ||
print(f" {issue}") | ||
exit(1) | ||
else: | ||
print("No hard-coded strings found.") | ||
|
||
if __name__ == "__main__": | ||
main() |
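For reference, this is roughly what the two simpler patterns flag. The Lua fragment below is invented and only meant to show the shape of a match:

```python
import re

# Hypothetical Lua fragment; "Hello" and "Speed" are made-up strings.
lua_source = '''
self:Print("Hello")
name = "Speed",
'''

# The same patterns the checker applies to Print(...) calls and name/desc fields.
print(re.findall(r'Print\(\s*"([^"]+)"\s*\)', lua_source))       # ['Hello']
print(re.findall(r'\b(name|desc)\s*=\s*"([^"]+)"', lua_source))  # [('name', 'Speed')]
```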
@@ -0,0 +1,110 @@ | ||
import os | ||
import re | ||
import xml.etree.ElementTree as ET | ||
|
||
# Step 1: Parse locales.xml to extract Lua file names | ||
def parse_locales_xml(xml_file): | ||
tree = ET.parse(xml_file) | ||
root = tree.getroot() | ||
namespace = {'ns': root.tag.split('}')[0].strip('{')} | ||
locale_files = [script.attrib['file'] for script in root.findall('ns:Script', namespace)] | ||
return locale_files | ||
|
||
# Step 2: Load and parse a Lua file into a dictionary | ||
def load_lua_file(lua_file): | ||
result = {} | ||
with open(lua_file, 'r') as file: | ||
for line in file: | ||
# Use regex to capture the key and value correctly | ||
match = re.match(r'L\["(.+)"\] = "?(.*)"?', line.strip()) | ||
if match: | ||
key = match[1] | ||
value = match[2] | ||
result[key] = value | ||
return result | ||
|
||
# Step 3: Compare translations | ||
def compare_translations(reference_dict, target_dict, locale): | ||
missing_keys = [] | ||
extra_keys = [] | ||
|
||
# Check for missing keys in the target dictionary | ||
for key, value in reference_dict.items(): | ||
if key not in target_dict: | ||
# If the reference value is True, use the key as the value | ||
enUS_value = key if value.lower() == 'true' else value | ||
missing_keys.append(f"| {key} | {enUS_value} |") | ||
|
||
# Check for extra keys in the target dictionary | ||
for key in target_dict: | ||
if key not in reference_dict: | ||
extra_keys.append(key) | ||
|
||
# Create markdown output for missing keys | ||
if missing_keys: | ||
markdown_report = f"# Missing Translations for {locale}\n\n" | ||
markdown_report += "| Missing Key | enUS Value |\n" | ||
markdown_report += "|-------------|------------|\n" | ||
markdown_report += "\n".join(missing_keys) | ||
markdown_report += "\n\nPlease provide one or more of these values in a Pull Request or a Comment on this issue:\n\n" | ||
markdown_report += "```\n" | ||
markdown_report += "\n".join([f'L["{key.split()[1]}"] = ""' for key in missing_keys]) | ||
markdown_report += "\n```\n" | ||
else: | ||
markdown_report = None | ||
|
||
return markdown_report, extra_keys | ||
|
||
# Step 4: Main function to load files and perform comparison | ||
def main(): | ||
locale_dir = "locale" | ||
output_directory = ".scripts/.output" | ||
|
||
# Create output_directory if it doesn't exist | ||
if not os.path.exists(output_directory): | ||
os.makedirs(output_directory) | ||
else: | ||
for filename in os.listdir(output_directory): | ||
file_path = os.path.join(output_directory, filename) | ||
try: | ||
os.unlink(file_path) | ||
except Exception as e: | ||
print('Failed to delete %s. Reason: %s' % (file_path, e)) | ||
|
||
locales_xml = f"{locale_dir}/locales.xml" | ||
locale_files = parse_locales_xml(locales_xml) | ||
|
||
# Reference locale (enUS.lua) | ||
reference_file = "enUS.lua" | ||
reference_dict = load_lua_file(f"{locale_dir}/{reference_file}") | ||
|
||
has_extra_keys = False | ||
|
||
# Compare each locale with the reference | ||
for locale_file in locale_files: | ||
if locale_file != reference_file: | ||
target_dict = load_lua_file(f"{locale_dir}/{locale_file}") | ||
markdown_report, extra_keys = compare_translations(reference_dict, target_dict, locale_file) | ||
|
||
if markdown_report: | ||
# Create output file for missing translations | ||
output_file_path = os.path.join(output_directory, f"{locale_file}_missing_keys.md") | ||
with open(output_file_path, "w") as output_file: | ||
output_file.write(markdown_report) | ||
print(f"Missing translations written to {output_file_path}") | ||
|
||
if extra_keys: | ||
# Print extra keys to console and set flag | ||
print(f"Extra translation keys in {locale_file}:") | ||
for key in extra_keys: | ||
print(f" {key}") | ||
has_extra_keys = True | ||
|
||
# Exit with non-zero code if extra keys were found | ||
if has_extra_keys: | ||
sys.exit(1) | ||
else: | ||
print("No extra translation keys found.") | ||
|
||
if __name__ == "__main__": | ||
main() |
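The parser above expects AceLocale-style registration lines, where enUS registers `true` and the other locales supply a translated string. A small sketch of the line format it handles — the key and translation below are invented:

```python
import re

# Hypothetical locale entries; the real ones live in locale/*.lua.
sample_lines = [
    'L["Party Loot"] = true',            # enUS: the key doubles as the value
    'L["Party Loot"] = "Gruppenbeute"',  # a translated locale supplies a string
]

# The same pattern load_lua_file() uses.
pattern = re.compile(r'L\["(.+)"\] = "?(.*?)"?$')
for line in sample_lines:
    m = pattern.match(line.strip())
    print(m.group(1), "->", m.group(2))
# Party Loot -> true
# Party Loot -> Gruppenbeute
```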
@@ -0,0 +1,17 @@ | ||
.PHONY: all_checks venv_up hardcode_string_check missing_translation_check | ||
|
||
all_checks: venv_up hardcode_string_check missing_translation_check | ||
|
||
# Variables | ||
PYTHON := python3 | ||
|
||
# Target for running the hardcoded string checker | ||
hardcode_string_check: | ||
@.venv/bin/python .scripts/hardcode_string_check.py | ||
|
||
# Target for running the missing translation checker | ||
missing_translation_check: | ||
@.venv/bin/python .scripts/missing_translation_check.py | ||
|
||
venv_up: | ||
@if [ ! -d ".venv" ]; then $(PYTHON) -m venv ./.venv; fi |
@@ -0,0 +1,5 @@ | ||
certifi==2024.7.4 | ||
charset-normalizer==3.3.2 | ||
idna==3.8 | ||
requests==2.32.3 | ||
urllib3==2.2.2 |