Mirror of https://github.com/vyos/vyos-documentation.git, synced 2025-10-26 01:31:44 +02:00
add translation feature

parent f0175e1638
commit abd23045bc
.gitignore (vendored), 1 line changed
@@ -1,5 +1,6 @@
 # Sphinx
 _build/
+localazy_private.json

 # python virtualenv
 venv/
docs/conf.py
@@ -72,12 +72,12 @@ master_doc = 'index'
 #
 # This is also used if you do content translation via gettext catalogs.
 # Usually you set "language" from the command line for these cases.
-language = None
+language = "en"

 # https://docs.readthedocs.io/en/stable/guides/manage-translations-sphinx.html#create-translatable-files
 locale_dirs = ['_locale/']
-gettext_compact = False
-gettext_uuid = True
+gettext_compact = True
+gettext_uuid = False


 # List of patterns, relative to source directory, that match files and
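For reference, the .pot catalogs consumed by the Localazy upload and by languagechecker.py below come from Sphinx's gettext builder; the readthedocs guide linked above describes the workflow. A minimal sketch of regenerating them, assuming Sphinx is installed and the documentation sources live under docs/ (not part of this commit):

# Sketch only: build the translation catalogs into docs/_build/gettext,
# equivalent to running "sphinx-build -b gettext docs docs/_build/gettext".
# With the settings above, gettext_compact=True merges the documents of each
# subdirectory into a single catalog and gettext_uuid=False omits uuid metadata.
from sphinx.cmd.build import build_main

exit_code = build_main(["-b", "gettext", "docs", "docs/_build/gettext"])
raise SystemExit(exit_code)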
languagechecker.py (new file, 59 lines)
@@ -0,0 +1,59 @@
'''
Parse gettext .pot files and extract path:line and msgid information.
Compare this with the files downloaded from Localazy.
The output lists the elements that were downloaded but are no longer needed.
TODO: better output
'''

import os
from babel.messages.pofile import read_po


def extract_content(file):
    content = []
    with open(file) as f:
        data = read_po(f)
        for message in data:
            if message.id:
                content.append(message)
    return content


gettext_dir = "docs/_build/gettext"
gettext_ext = ".pot"
original_content = list()

language_dir = "docs/_locale"
language_ext = ".pot"
language_content = dict()

# collect messages from the generated gettext catalogs
for (dirpath, dirnames, filenames) in os.walk(gettext_dir):
    for file in filenames:
        if file.endswith(gettext_ext):
            original_content.extend(extract_content(f"{dirpath}/{file}"))

# collect messages per downloaded language
languages = next(os.walk(language_dir))[1]
for language in languages:

    language_content[language] = list()
    for (dirpath, dirnames, filenames) in os.walk(f"{language_dir}/{language}"):
        for file in filenames:
            if file.endswith(language_ext):
                language_content[language].extend(extract_content(f"{dirpath}/{file}"))



for lang in language_content.keys():
    for message in language_content[lang]:
        found = False
        for ori_message in original_content:
            if ori_message.id == message.id:
                found = True
        if not found:
            print()
            print(f"{lang}: {message.id}")
            for loc in message.locations:
                print(f"{loc[0]}:{loc[1]}")
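The final block above checks every downloaded message against every original message, which is quadratic in the number of msgids. A minimal sketch of the same stale-message report using a set for the membership test; a possible refinement, not part of the commit:

# Sketch only: collect the original msgids once, then test membership directly.
original_ids = {message.id for message in original_content}

for lang, messages in language_content.items():
    for message in messages:
        if message.id not in original_ids:
            print()
            print(f"{lang}: {message.id}")
            for path, line in message.locations:
                print(f"{path}:{line}")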
localazy.json
@@ -1,11 +1,12 @@
 {
   "upload": {
     "type": "pot",
     "folder": "docs/_build/gettext",
     "files": {
       "pattern": "**.pot",
       "excludes": [
         "changelog/*.pot",
+        "changelog.pot",
         "coverage.pot"
       ],
       "path": "${path}"
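The upload block sends every generated catalog matching **.pot except the excluded changelog and coverage files; this hunk adds the top-level changelog.pot to the exclusions. A small preview of which files such a pattern would select, assuming glob-like matching relative to the configured folder (illustrative only; the Localazy CLI applies its own matching rules):

# Illustrative only: lists the .pot files a "**.pot" pattern with the excludes
# above would select, using Python's glob/fnmatch semantics as an approximation.
from fnmatch import fnmatch
from pathlib import Path

folder = Path("docs/_build/gettext")
excludes = ["changelog/*.pot", "changelog.pot", "coverage.pot"]

for pot in sorted(folder.rglob("*.pot")):
    relative = pot.relative_to(folder).as_posix()
    if any(fnmatch(relative, pattern) for pattern in excludes):
        continue
    print(relative)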