forked from home-assistant/core

Commit
Support serving of backend translations (home-assistant#12453)
* Add view to support backend translation fetching
* Load backend translations from component json
* Translations for season sensor
* Scripts to merge and unpack Lokalise translations
* Fix copy paste error
* Serve post-lokalise translations to frontend
* Linting
* Auto-deploy translations with Travis
* Commit post-lokalise translation files
* Split logic into more helper functions
* Fall back to English for missing keys
* Move local translation copies to `.translations`
* Linting
* Initial tests
* Remove unnecessary file check
* Convert translation helper to async/await
* Convert translation helper tests to async/await
* Use set subtraction to find missing_components
* load_translation_files use component->file mapping
* Remove duplicated resources fetching
  (Get to take advantage of the slick Python 3.5 dict merging here.)
* Switch to live project ID
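The view that actually serves these strings to the frontend is not shown in this excerpt. As a rough sketch of how such a view could call the new helper (the class name and URL below are illustrative assumptions; only `async_get_translations` comes from this commit):

    # Hypothetical view serving backend translation strings to the frontend.
    # Only homeassistant.helpers.translation.async_get_translations is part of
    # this commit; the view name and URL here are assumptions.
    from homeassistant.components.http import HomeAssistantView
    from homeassistant.helpers import translation


    class TranslationsView(HomeAssistantView):
        """Serve backend-defined translation strings."""

        url = '/api/translations/{language}'
        name = 'api:translations'

        async def get(self, request, language):
            """Return the flattened translation resources for a language."""
            hass = request.app['hass']
            resources = await translation.async_get_translations(
                hass, language)
            return self.json({'resources': resources})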
Showing 19 changed files with 575 additions and 6 deletions.
@@ -103,3 +103,6 @@ desktop.ini

# mypy
/.mypy_cache/*

# Secrets
.lokalise_token

@@ -0,0 +1,8 @@
{
    "state": {
        "autumn": "Autumn",
        "spring": "Spring",
        "summer": "Summer",
        "winter": "Winter"
    }
}

@@ -0,0 +1,8 @@
{
    "state": {
        "spring": "Spring",
        "summer": "Summer",
        "autumn": "Autumn",
        "winter": "Winter"
    }
}

@@ -0,0 +1,126 @@
"""Translation string lookup helpers."""
import logging
# pylint: disable=unused-import
from typing import Optional  # NOQA
from os import path

from homeassistant.loader import get_component, bind_hass
from homeassistant.util.json import load_json

_LOGGER = logging.getLogger(__name__)

TRANSLATION_STRING_CACHE = 'translation_string_cache'


def recursive_flatten(prefix, data):
    """Return a flattened representation of dict data."""
    output = {}
    for key, value in data.items():
        if isinstance(value, dict):
            output.update(
                recursive_flatten('{}{}.'.format(prefix, key), value))
        else:
            output['{}{}'.format(prefix, key)] = value
    return output


def flatten(data):
    """Return a flattened representation of dict data."""
    return recursive_flatten('', data)


def component_translation_file(component, language):
    """Return the translation json file location for a component."""
    if '.' in component:
        name = component.split('.', 1)[1]
    else:
        name = component

    module = get_component(component)
    component_path = path.dirname(module.__file__)

    # If loading translations for the package root (__init__.py), the
    # prefix should be skipped.
    if module.__name__ == module.__package__:
        filename = '{}.json'.format(language)
    else:
        filename = '{}.{}.json'.format(name, language)

    return path.join(component_path, '.translations', filename)


def load_translations_files(translation_files):
    """Load and parse translation.json files."""
    loaded = {}
    for component, translation_file in translation_files.items():
        loaded[component] = load_json(translation_file)

    return loaded


def build_resources(translation_cache, components):
    """Build the resources response for the given components."""
    # Build response
    resources = {}
    for component in components:
        if '.' not in component:
            domain = component
        else:
            domain = component.split('.', 1)[0]

        if domain not in resources:
            resources[domain] = {}

        # Add the translations for this component to the domain resources.
        # Since clients cannot determine which platform an entity belongs to,
        # all translations for a domain will be returned together.
        resources[domain].update(translation_cache[component])

    return resources


@bind_hass
async def async_get_component_resources(hass, language):
    """Return translation resources for all components."""
    if TRANSLATION_STRING_CACHE not in hass.data:
        hass.data[TRANSLATION_STRING_CACHE] = {}
    if language not in hass.data[TRANSLATION_STRING_CACHE]:
        hass.data[TRANSLATION_STRING_CACHE][language] = {}
    translation_cache = hass.data[TRANSLATION_STRING_CACHE][language]

    # Get the set of components
    components = hass.config.components

    # Calculate the missing components
    missing_components = components - set(translation_cache)
    missing_files = {}
    for component in missing_components:
        missing_files[component] = component_translation_file(
            component, language)

    # Load missing files
    if missing_files:
        loaded_translations = await hass.async_add_job(
            load_translations_files, missing_files)

        # Update cache
        for component, translation_data in loaded_translations.items():
            translation_cache[component] = translation_data

    resources = build_resources(translation_cache, components)

    # Return the component translations resources under the 'component'
    # translation namespace
    return flatten({'component': resources})


@bind_hass
async def async_get_translations(hass, language):
    """Return all backend translations."""
    resources = await async_get_component_resources(hass, language)
    if language != 'en':
        # Fetch the English resources, as a fallback for missing keys
        base_resources = await async_get_component_resources(hass, 'en')
        resources = {**base_resources, **resources}

    return resources

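For orientation, here is what the final flatten step yields when only the season sensor strings from this commit are cached. This is a worked example derived from the functions above, not code from the commit:

    # resources as built by build_resources() for component 'sensor.season'
    resources = {
        'sensor': {
            'state': {
                'autumn': 'Autumn',
                'spring': 'Spring',
                'summer': 'Summer',
                'winter': 'Winter',
            }
        }
    }

    flatten({'component': resources})
    # => {'component.sensor.state.autumn': 'Autumn',
    #     'component.sensor.state.spring': 'Spring',
    #     'component.sensor.state.summer': 'Summer',
    #     'component.sensor.state.winter': 'Winter'}

For a non-English language, `async_get_translations` merges these keys over the English set with `{**base_resources, **resources}`, so any key missing from the requested language falls back to its English value.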
@@ -0,0 +1,39 @@
#!/usr/bin/env bash

# Safe bash settings
# -e Exit on command fail
# -u Exit on unset variable
# -o pipefail Exit if piped command has error code
set -eu -o pipefail

cd "$(dirname "$0")/.."

if [ -z "${LOKALISE_TOKEN-}" ] && [ ! -f .lokalise_token ] ; then
    echo "Lokalise API token is required to download the latest set of" \
         "translations. Please create an account by using the following link:" \
         "https://lokalise.co/signup/130246255a974bd3b5e8a1.51616605/all/" \
         "Place your token in a new file \".lokalise_token\" in the repo" \
         "root directory."
    exit 1
fi

# Load token from file if not already in the environment
[ -z "${LOKALISE_TOKEN-}" ] && LOKALISE_TOKEN="$(<.lokalise_token)"

PROJECT_ID="130246255a974bd3b5e8a1.51616605"
LOCAL_DIR="$(pwd)/build/translations-download"
FILE_FORMAT=json

mkdir -p ${LOCAL_DIR}

docker pull lokalise/lokalise-cli
docker run \
    -v ${LOCAL_DIR}:/opt/dest/locale \
    lokalise/lokalise-cli lokalise \
    --token ${LOKALISE_TOKEN} \
    export ${PROJECT_ID} \
    --export_empty skip \
    --type json \
    --unzip_to /opt/dest

script/translations_download_split.py

@@ -0,0 +1,81 @@
#!/usr/bin/env python3
"""Merge all translation sources into a single JSON file.""" | ||
import glob
import os
import re

from homeassistant.util import json as json_util

FILENAME_FORMAT = re.compile(r'strings\.(?P<suffix>\w+)\.json')


def get_language(path):
    """Get the language code for the given file path."""
    return os.path.splitext(os.path.basename(path))[0]


def get_component_path(lang, component):
    """Get the component translation path."""
    if os.path.isdir(os.path.join("homeassistant", "components", component)):
        return os.path.join(
            "homeassistant", "components", component, ".translations",
            "{}.json".format(lang))
    else:
        return os.path.join(
            "homeassistant", "components", ".translations",
            "{}.{}.json".format(component, lang))


def get_platform_path(lang, component, platform):
    """Get the platform translation path."""
    if os.path.isdir(os.path.join(
            "homeassistant", "components", component, platform)):
        return os.path.join(
            "homeassistant", "components", component, platform,
            ".translations", "{}.json".format(lang))
    else:
        return os.path.join(
            "homeassistant", "components", component, ".translations",
            "{}.{}.json".format(platform, lang))


def get_component_translations(translations):
    """Get the component level translations."""
    translations = translations.copy()
    translations.pop('platform', None)

    return translations


def save_language_translations(lang, translations):
    """Distribute the translations for this language."""
    components = translations.get('component', {})
    for component, component_translations in components.items():
        base_translations = get_component_translations(component_translations)
        if base_translations:
            path = get_component_path(lang, component)
            os.makedirs(os.path.dirname(path), exist_ok=True)
            json_util.save_json(path, base_translations)

        for platform, platform_translations in component_translations.get(
                'platform', {}).items():
            path = get_platform_path(lang, component, platform)
            os.makedirs(os.path.dirname(path), exist_ok=True)
            json_util.save_json(path, platform_translations)


def main():
    """Main section of the script."""
    if not os.path.isfile("requirements_all.txt"):
        print("Run this from HA root dir")
        return

    paths = glob.iglob("build/translations-download/*.json")
    for path in paths:
        lang = get_language(path)
        translations = json_util.load_json(path)
        save_language_translations(lang, translations)


if __name__ == '__main__':
    main()

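To make the download/split flow concrete, here is a small worked example. The dictionary below is an assumed shape of a Lokalise export, inferred from `save_language_translations` above rather than copied from a real download:

    # Assumed contents of build/translations-download/en.json after the
    # download script runs (shape inferred from the splitting code above).
    downloaded = {
        'component': {
            'sensor': {
                'platform': {
                    'season': {
                        'state': {
                            'autumn': 'Autumn',
                            'spring': 'Spring',
                            'summer': 'Summer',
                            'winter': 'Winter',
                        }
                    }
                }
            }
        }
    }

    # save_language_translations('en', downloaded) writes the season block via
    # get_platform_path('en', 'sensor', 'season'), i.e. to
    # homeassistant/components/sensor/.translations/season.en.json, because
    # homeassistant/components/sensor/season is not a directory. No component
    # level file is written here, since popping 'platform' leaves nothing.
    save_language_translations('en', downloaded)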