feat(extract-translations): add extract translations script #597
Open
bertyhell wants to merge 16 commits from bertyhell/feature/translations-extraction-script into master
11 changed files with 289 additions and 149 deletions
@ -0,0 +1,99 @@
const findInFiles = require("find-in-files");
const _ = require("lodash");
const fs = require("fs/promises");
const JSON5 = require("json5");

// Extract translations from $t() functions in the source code and add the missing translations to all language files in src/languages/*.js
async function extractTranslations() {
    // Load all ES6 module translation files into a commonJS process
    const languageList = {};
    const filesNames = await fs.readdir("src/languages");
    for (let fileName of filesNames) {
        if (fileName.endsWith("js") && fileName !== "index.js") {
            const content = (await fs.readFile("src/languages/" + fileName)).toString("utf-8");
            const json = content.replace("export default {", "{").replace("};", "}");
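            // The language files are ES modules of the form "export default { ... };";
            // stripping that wrapper leaves a plain object literal that JSON5 can parse.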
            languageList[fileName.split(".")[0]] = JSON5.parse(json);
        }
    }

    const en = languageList.en;

    const englishExtracted = [];

    // Search the source code for usages of $t(...)
    const tFuncResults = await findInFiles.find({
        term: "\\$t\\(([^)]+?)\\)",
        flags: "g",
    }, "./src", "\\.(vue|js)");

    // Add the found strings to the englishExtracted list
    const warnings = [];
    Object.values(tFuncResults).map(result => {
        result.matches.map(match => {
            const functionParams = match.substring(3, match.length - 1).trim();
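            // substring(3, length - 1) drops the leading "$t(" and the trailing ")", keeping only the argument list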
            const firstChar = functionParams[0];
            if (!["\"", "'"].includes(firstChar)) {
                // Variable => cannot extract => output warning
                warnings.push("Cannot extract non string values in " + match);
            } else {
                // Actual string
                const content = _.trim(functionParams.split(firstChar)[1], "\"' ");
                englishExtracted.push(content);
            }
        });
    });

    // Search the source code for usages of <i18n-t tag="..." keypath="...">
    const i18nTTagResults = await findInFiles.find({
        term: "<i18n-t[^>]+keypath=\"([^\"]+)\"[^>]*>",
        flags: "g",
    }, "./src", "\\.vue");

    // Add the found strings to the englishExtracted list
    Object.values(i18nTTagResults).map(result => {
        result.matches.map(match => {
            const content = _.trim(match.split("keypath")[1].split("\"")[1], "\"' ");
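            // The first quoted segment after "keypath" is the extracted translation key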
            englishExtracted.push(content);
        });
    });

    // Update all languages with the missing strings
Review by Saibamen, 3 years ago (Migrated from github.com):
    Do we want to do it here? We already have a script for this: https://github.com/louislam/uptime-kuma/blob/master/extra/update-language-files/index.js
    Or we can delete
Reply by bertyhell:
    A single script seems better. Would you like me to remove the other script in this PR? Or should I make a new PR for that?
    for (let extractedTranslation of englishExtracted) {
        for (let langDict of Object.values(languageList)) {
            if (!Object.keys(langDict).includes(extractedTranslation)) {
                langDict[extractedTranslation] = en[extractedTranslation] || extractedTranslation;
            }
        }
    }

    // Check for translations in other language files that are not in the English file and output warnings for them
    const englishKeys = Object.keys(en);
    for (let langName of Object.keys(languageList)) {
        if (langName !== "en") {
            const langKeys = Object.keys(languageList[langName]);
            const unusedKeys = _.without(langKeys, ...englishKeys);
            if (unusedKeys.length) {
                warnings.push(`Language file ${langName} contains keys that are not used: ["${unusedKeys.join("\", \"")}"]`);
            }
        }
    }

    // Write the translation string json back to files
    for (let langName of Object.keys(languageList)) {
        const translationsString = JSON5.stringify(languageList[langName], {
            quote: "\"",
            space: 4,
        })
            .replace(/"$/m, "\","); // Add comma to the last line
        await fs.writeFile(`./src/languages/${_.kebabCase(langName)}.js`, `export default ${translationsString};\n`);
    }

    // Output warnings if there are any
    if (warnings.length) {
        console.log("Extraction successful with warnings: \n\t" + warnings.join("\n\t"));
    } else {
        console.log("Extraction successful");
    }
}

extractTranslations();
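For reviewers who want a concrete picture of what a run does, here is a minimal before/after sketch of a single language file. The script path in the comment, the German file, and the specific translation keys are illustrative assumptions; the diff does not name them.

// src/languages/de.js before running the script (illustrative content)
export default {
    languageName: "Deutsch",
    Settings: "Einstellungen",
};

// After something like `node extra/extract-translations.js` (path assumed, not shown in this diff),
// every key found via $t("...") or <i18n-t keypath="..."> that is missing here is added, using the
// English text (or the key itself when no English entry exists) as a placeholder value:
export default {
    languageName: "Deutsch",
    Settings: "Einstellungen",
    "Quick Stats": "Quick Stats",
};

The placeholder behaviour in the sketch follows the `en[extractedTranslation] || extractedTranslation` fallback in the update loop above.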
@ -1,3 +0,0 @@
package-lock.json
test.js
languages/
@ -1,86 +0,0 @@
// Need to use ES6 to read language files

import fs from "fs";
import path from "path";
import util from "util";

// https://stackoverflow.com/questions/13786160/copy-folder-recursively-in-node-js
/**
 * Look ma, it's cp -R.
 * @param {string} src The path to the thing to copy.
 * @param {string} dest The path to the new copy.
 */
const copyRecursiveSync = function (src, dest) {
    let exists = fs.existsSync(src);
    let stats = exists && fs.statSync(src);
    let isDirectory = exists && stats.isDirectory();

    if (isDirectory) {
        fs.mkdirSync(dest);
        fs.readdirSync(src).forEach(function (childItemName) {
            copyRecursiveSync(path.join(src, childItemName),
                path.join(dest, childItemName));
        });
    } else {
        fs.copyFileSync(src, dest);
    }
};

console.log("Arguments:", process.argv);
const baseLangCode = process.argv[2] || "en";
console.log("Base Lang: " + baseLangCode);
if (fs.existsSync("./languages")) {
    fs.rmdirSync("./languages", { recursive: true });
}
copyRecursiveSync("../../src/languages", "./languages");

const en = (await import("./languages/en.js")).default;
const baseLang = (await import(`./languages/${baseLangCode}.js`)).default;
const files = fs.readdirSync("./languages");
console.log("Files:", files);

for (const file of files) {
    if (!file.endsWith(".js")) {
        console.log("Skipping " + file);
        continue;
    }

    console.log("Processing " + file);
    const lang = await import("./languages/" + file);

    let obj;

    if (lang.default) {
        obj = lang.default;
    } else {
        console.log("Empty file");
        obj = {
            languageName: "<Your Language name in your language (not in English)>"
        };
    }

    // En first
    for (const key in en) {
        if (! obj[key]) {
            obj[key] = en[key];
        }
    }

    if (baseLang !== en) {
        // Base second
        for (const key in baseLang) {
            if (! obj[key]) {
                obj[key] = key;
            }
        }
    }

    const code = "export default " + util.inspect(obj, {
        depth: null,
    });

    fs.writeFileSync(`../../src/languages/${file}`, code);
}

fs.rmdirSync("./languages", { recursive: true });
console.log("Done. Fixing formatting by ESLint...");
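For context when comparing the two approaches discussed in the review thread above: judging from the process.argv handling and the "../../src/languages" paths, the removed script was run from extra/update-language-files/ (the directory named in the review link), roughly as sketched below. The exact invocation is an assumption; the diff itself does not show how it was wired up.

node index.js      # merge missing keys from en.js into every language file
node index.js de   # additionally add keys that exist only in de.js, using the key name itself as the value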
@ -1,12 +0,0 @@
{
    "name": "update-language-files",
    "type": "module",
    "version": "1.0.0",
    "description": "",
    "main": "index.js",
    "scripts": {
        "test": "echo \"Error: no test specified\" && exit 1"
    },
    "author": "",
    "license": "ISC"
}