feat(i18n): add type-checked translation keys
This commit is contained in:
98
scripts/generate-i18n-keys.mjs
Normal file
98
scripts/generate-i18n-keys.mjs
Normal file
@@ -0,0 +1,98 @@
|
||||
#!/usr/bin/env node
|
||||
import { promises as fs } from "node:fs";
|
||||
import path from "node:path";
|
||||
import { fileURLToPath } from "node:url";
|
||||
|
||||
// ESM equivalents of CommonJS __filename/__dirname for this script.
const __filename = fileURLToPath(import.meta.url);
const __dirname = path.dirname(__filename);

// Repository root: this script lives in <root>/scripts/.
const ROOT_DIR = path.resolve(__dirname, "..");

// English locale JSON files are the source of truth for key generation.
const LOCALE_DIR = path.resolve(ROOT_DIR, "src/locales/en");

// Generated TypeScript artifacts (key union + resource interface).
const KEY_OUTPUT = path.resolve(ROOT_DIR, "src/types/generated/i18n-keys.ts");
const RESOURCE_OUTPUT = path.resolve(
  ROOT_DIR,
  "src/types/generated/i18n-resources.ts",
);
|
||||
|
||||
// True for non-null, non-array objects — i.e. nested groups of translation
// keys. Arrays and primitives are treated as leaf values by the callers.
const isPlainObject = (value) =>
  value !== null && typeof value === "object" && Array.isArray(value) === false;
|
||||
|
||||
// Flatten a nested translation object into dotted key paths, e.g.
// { a: { b: "x" }, c: "y" } -> ["a.b", "c"]. A non-empty `prefix` is
// prepended to every path. Arrays and primitives count as leaves.
const flattenKeys = (data, prefix = "") =>
  Object.entries(data).flatMap(([key, value]) => {
    const dotted = prefix ? `${prefix}.${key}` : key;
    const isNestedGroup =
      typeof value === "object" && value !== null && !Array.isArray(value);
    return isNestedGroup ? flattenKeys(value, dotted) : [dotted];
  });
|
||||
|
||||
// Render a TypeScript object-type literal mirroring the JSON structure:
// every leaf (primitive or array) becomes `string`, nested objects recurse
// with member keys sorted for deterministic output. `indent` is the column
// of the closing brace; members sit two spaces deeper.
const buildType = (data, indent = 0) => {
  const isLeaf =
    typeof data !== "object" || data === null || Array.isArray(data);
  if (isLeaf) {
    return "string";
  }

  const sorted = Object.entries(data).sort(([a], [b]) => a.localeCompare(b));
  if (sorted.length === 0) {
    return "{}";
  }

  const memberPad = " ".repeat(indent + 2);
  const members = sorted.map(
    ([key, value]) =>
      `${memberPad}${JSON.stringify(key)}: ${buildType(value, indent + 2)};`,
  );
  return `{\n${members.join("\n")}\n${" ".repeat(indent)}}`;
};
|
||||
|
||||
const loadNamespaceJson = async () => {
|
||||
const dirents = await fs.readdir(LOCALE_DIR, { withFileTypes: true });
|
||||
const namespaces = [];
|
||||
for (const dirent of dirents) {
|
||||
if (!dirent.isFile() || !dirent.name.endsWith(".json")) continue;
|
||||
const name = dirent.name.replace(/\.json$/, "");
|
||||
const filePath = path.join(LOCALE_DIR, dirent.name);
|
||||
const raw = await fs.readFile(filePath, "utf8");
|
||||
const json = JSON.parse(raw);
|
||||
namespaces.push({ name, json });
|
||||
}
|
||||
namespaces.sort((a, b) => a.name.localeCompare(b.name));
|
||||
return namespaces;
|
||||
};
|
||||
|
||||
// Render the i18n-keys.ts module: a readonly tuple of every dotted key plus
// a union type derived from it.
const buildKeysFile = (keys) => {
  // JSON.stringify (rather than hand-written \" quoting) escapes quotes and
  // backslashes inside keys, so the generated file stays syntactically valid
  // for any key text. Two-space entry indent matches the other generators.
  const arrayLiteral = keys
    .map((key) => `  ${JSON.stringify(key)}`)
    .join(",\n");
  return `// This file is auto-generated by scripts/generate-i18n-keys.mjs\n// Do not edit this file manually.\n\nexport const translationKeys = [\n${arrayLiteral}\n] as const;\n\nexport type TranslationKey = typeof translationKeys[number];\n`;
};
|
||||
|
||||
// Render the i18n-resources.ts module: a TypeScript interface mirroring the
// JSON shape of every namespace, nested under the single "translation"
// bundle. `namespaces` is [{ name, json }]; callers pass it pre-sorted
// (loadNamespaceJson sorts by name), so output order is deterministic.
const buildResourcesFile = (namespaces) => {
  const namespaceEntries = namespaces
    .map(({ name, json }) => {
      // indent 4: each namespace's type body sits inside interface (0)
      // -> translation (2) -> namespace member (4).
      const typeStr = buildType(json, 4);
      return `  ${JSON.stringify(name)}: ${typeStr};`;
    })
    .join("\n");

  // NOTE: whitespace inside this template is part of the generated file's
  // formatting — keep it in sync with buildType's indentation.
  return `// This file is auto-generated by scripts/generate-i18n-keys.mjs\n// Do not edit this file manually.\n\nexport interface TranslationResources {\n  translation: {\n${namespaceEntries}\n  };\n}\n`;
};
|
||||
|
||||
// Entry point: load the English locale namespaces, derive the flat key list,
// and emit both generated TypeScript files.
const main = async () => {
  const namespaces = await loadNamespaceJson();
  const keys = namespaces.flatMap(({ name, json }) => flattenKeys(json, name));
  const keysContent = buildKeysFile(keys);
  const resourcesContent = buildResourcesFile(namespaces);

  // Ensure BOTH output directories exist — today they coincide, but the
  // original only created KEY_OUTPUT's directory, which silently relied on
  // that. mkdir with recursive:true is idempotent, so doing both is safe.
  await Promise.all([
    fs.mkdir(path.dirname(KEY_OUTPUT), { recursive: true }),
    fs.mkdir(path.dirname(RESOURCE_OUTPUT), { recursive: true }),
  ]);

  // The two files are independent; write them in parallel.
  await Promise.all([
    fs.writeFile(KEY_OUTPUT, keysContent, "utf8"),
    fs.writeFile(RESOURCE_OUTPUT, resourcesContent, "utf8"),
  ]);

  console.log(`Generated ${keys.length} translation keys.`);
};
|
||||
|
||||
// Kick off generation. Failures are logged and surfaced through the exit
// code; process.exitCode (rather than process.exit) lets pending stdio
// flush before the process ends.
main().catch((err) => {
  console.error("Failed to generate i18n metadata:", err);
  process.exitCode = 1;
});
|
||||
Reference in New Issue
Block a user