From 448d258addc39a7a95613546c9b2981b32f12d92 Mon Sep 17 00:00:00 2001 From: Felix Fontein Date: Sat, 6 Mar 2021 17:19:58 +0100 Subject: [PATCH 1/2] Allow collections to have extra documentation next to the generated plugin docs. --- antsibull/build_changelog.py | 8 +- antsibull/changelog.py | 4 +- antsibull/cli/antsibull_lint.py | 29 +++ antsibull/cli/doc_commands/stable.py | 19 +- .../data/docsite/plugins_by_collection.rst.j2 | 19 ++ antsibull/extra_docs.py | 232 ++++++++++++++++++ antsibull/lint_extra_docs.py | 81 ++++++ antsibull/write_docs.py | 68 ++++- 8 files changed, 449 insertions(+), 11 deletions(-) create mode 100644 antsibull/extra_docs.py create mode 100644 antsibull/lint_extra_docs.py diff --git a/antsibull/build_changelog.py b/antsibull/build_changelog.py index 6e9bfe69..6546922b 100644 --- a/antsibull/build_changelog.py +++ b/antsibull/build_changelog.py @@ -247,7 +247,7 @@ def dump_items(builder: RstBuilder, items: PluginDumpT) -> None: def add_plugins(builder: RstBuilder, data: PluginDataT) -> None: plugins: PluginDumpT = [] - for name, prefix, _, release_entry in data: + for name, prefix, dummy, release_entry in data: if release_entry: for plugin_type, plugin_datas in release_entry.plugins.items(): for plugin_data in plugin_datas: @@ -261,7 +261,7 @@ def add_plugins(builder: RstBuilder, data: PluginDataT) -> None: def add_objects(builder: RstBuilder, data: PluginDataT) -> None: objects: PluginDumpT = [] - for name, prefix, _, release_entry in data: + for name, prefix, dummy, release_entry in data: if release_entry: for object_type, object_datas in release_entry.objects.items(): for object_data in object_datas: @@ -274,7 +274,7 @@ def add_objects(builder: RstBuilder, data: PluginDataT) -> None: def add_modules(builder: RstBuilder, data: PluginDataT) -> None: modules: PluginDumpT = [] - for name, prefix, _, release_entry in data: + for name, prefix, dummy, release_entry in data: if release_entry: for module in release_entry.modules: namespace = 
module.get('namespace') or '' @@ -351,7 +351,7 @@ def append_changelog(builder: RstBuilder, for section, section_title in DEFAULT_SECTIONS: maybe_add_section_title = create_title_adder(builder, section_title, 1) - for name, _, _, release_entry in data: + for name, dummy, dummy, release_entry in data: if not release_entry or release_entry.has_no_changes([section]): continue diff --git a/antsibull/changelog.py b/antsibull/changelog.py index 0d48ea07..93ce1a22 100644 --- a/antsibull/changelog.py +++ b/antsibull/changelog.py @@ -240,7 +240,7 @@ async def _get_changelog_file(self, version: PypiVer, path = await base_downloader(str(version)) if os.path.isdir(path): changelog: t.Optional[ChangelogData] = None - for root, _, files in os.walk(path): + for root, dummy, files in os.walk(path): if 'changelog.yaml' in files: with open(os.path.join(root, 'changelog.yaml'), 'rb') as f: changelog = f.read() @@ -496,7 +496,7 @@ def get_changelog( changelog = [] sorted_versions = collect_versions(versions, ansible_changelog.config) - for index, (version_str, _) in enumerate(sorted_versions): + for index, (version_str, dummy) in enumerate(sorted_versions): version, deps = versions[version_str] prev_version = None if index + 1 < len(sorted_versions): diff --git a/antsibull/cli/antsibull_lint.py b/antsibull/cli/antsibull_lint.py index 7031b701..986b648e 100644 --- a/antsibull/cli/antsibull_lint.py +++ b/antsibull/cli/antsibull_lint.py @@ -24,6 +24,7 @@ from antsibull_changelog.logger import setup_logger from ..args import get_toplevel_parser, normalize_toplevel_options +from ..lint_extra_docs import lint_collection_extra_docs_files def run(args: List[str]) -> int: @@ -55,6 +56,16 @@ def run(args: List[str]) -> int: metavar='/path/to/changelog.yaml', help='path to changelogs/changelog.yaml') + collection_docs = subparsers.add_parser('collection-docs', + parents=[common], + help='Collection extra docs linter for inclusion' + ' in docsite') + 
collection_docs.set_defaults(command=command_lint_collection_docs) + + collection_docs.add_argument('collection_root_path', + metavar='/path/to/collection', + help='path to collection (directory that includes galaxy.yml)') + if HAS_ARGCOMPLETE: argcomplete.autocomplete(parser) @@ -94,6 +105,24 @@ def command_lint_changelog(args: Any) -> int: return 3 if messages else 0 +def command_lint_collection_docs(args: Any) -> int: + """ + Validate docs/docsite/rst/ in a collection. + + :arg args: Parsed arguments + """ + errors = lint_collection_extra_docs_files(args.collection_root_path) + + messages = sorted(set( + '%s:%d:%d: %s' % (error[0], error[1], error[2], error[3]) + for error in errors)) + + for message in messages: + print(message) + + return 3 if messages else 0 + + def main() -> int: """ Entrypoint called from the script. diff --git a/antsibull/cli/doc_commands/stable.py b/antsibull/cli/doc_commands/stable.py index f30434aa..46417759 100644 --- a/antsibull/cli/doc_commands/stable.py +++ b/antsibull/cli/doc_commands/stable.py @@ -21,6 +21,7 @@ from ...collections import install_together from ...compat import asyncio_run, best_get_loop from ...dependency_files import DepsFile +from ...extra_docs import load_collections_extra_docs from ...docs_parsing.parsing import get_ansible_plugin_info from ...docs_parsing.fqcn import get_fqcn_parts from ...docs_parsing.routing import ( @@ -39,6 +40,7 @@ output_collection_namespace_indexes, output_indexes, output_plugin_indexes, + output_extra_docs, ) from ...utils.transformations import get_collection_namespaces @@ -329,8 +331,16 @@ def generate_docs_for_all_collections(venv: t.Union[VenvRunner, FakeVenvRunner], flog.debug('Finished loading errors') """ + # Load collection extra docs data + extra_docs_data = asyncio_run(load_collections_extra_docs( + {name: data.path for name, data in collection_metadata.items()})) + flog.debug('Finished getting collection extra docs data') + plugin_contents = get_plugin_contents(plugin_info, 
nonfatal_errors) collection_to_plugin_info = get_collection_contents(plugin_contents) + # Make sure collections without documentable plugins are mentioned + for collection in collection_metadata: + collection_to_plugin_info[collection] flog.debug('Finished getting collection data') collection_namespaces = get_collection_namespaces(collection_to_plugin_info.keys()) @@ -347,13 +357,14 @@ def generate_docs_for_all_collections(venv: t.Union[VenvRunner, FakeVenvRunner], asyncio_run(output_indexes(collection_to_plugin_info, dest_dir, collection_metadata=collection_metadata, - squash_hierarchy=squash_hierarchy)) + squash_hierarchy=squash_hierarchy, + extra_docs_data=extra_docs_data)) flog.notice('Finished writing indexes') asyncio_run(output_all_plugin_stub_rst(stubs_info, dest_dir, collection_metadata=collection_metadata, squash_hierarchy=squash_hierarchy)) - flog.debug('Finished writing plugin subs') + flog.debug('Finished writing plugin stubs') asyncio_run(output_all_plugin_rst(collection_to_plugin_info, plugin_info, nonfatal_errors, dest_dir, @@ -361,6 +372,10 @@ def generate_docs_for_all_collections(venv: t.Union[VenvRunner, FakeVenvRunner], squash_hierarchy=squash_hierarchy)) flog.debug('Finished writing plugin docs') + asyncio_run(output_extra_docs(dest_dir, extra_docs_data, + squash_hierarchy=squash_hierarchy)) + flog.debug('Finished writing extra docs') + def generate_docs() -> int: """ diff --git a/antsibull/data/docsite/plugins_by_collection.rst.j2 b/antsibull/data/docsite/plugins_by_collection.rst.j2 index 36ea1e20..e1058846 100644 --- a/antsibull/data/docsite/plugins_by_collection.rst.j2 +++ b/antsibull/data/docsite/plugins_by_collection.rst.j2 @@ -14,10 +14,29 @@ Collection version @{ collection_version }@ .. toctree:: :maxdepth: 1 +{% for section in extra_docs_sections %} +@{section.title}@ +@{ '-' * (section.title | length) }@ + +{% if section.toctree %} +.. 
toctree:: + :maxdepth: 1 + +{% for toctree_entry in section.toctree %} + @{toctree_entry}@ +{% endfor %} +{% endif %} + +{% endfor %} + Plugin Index ------------ +{% if plugin_maps %} These are the plugins in the @{collection_name}@ collection +{% else %} +There are no plugins in the @{collection_name}@ collection with automatically generated documentation. +{% endif %} {% for category, plugins in plugin_maps.items() | sort %} diff --git a/antsibull/extra_docs.py b/antsibull/extra_docs.py new file mode 100644 index 00000000..5ddb8e5d --- /dev/null +++ b/antsibull/extra_docs.py @@ -0,0 +1,232 @@ +# coding: utf-8 +# Author: Felix Fontein +# License: GPLv3+ +# Copyright: Ansible Project, 2021 + +import asyncio +import os +import os.path +import re +import typing as t + +import aiofiles +import asyncio_pool + +from . import app_context +from .logging import log +from .yaml import load_yaml_file + + +mlog = log.fields(mod=__name__) + +_RST_LABEL_DEFINITION = re.compile(r'''^\.\. _([^:]+):''') + + +class ExtraDocsIndexError(Exception): + pass + + +class Section: + title: str + toctree: t.List[str] + + def __init__(self, title: str, toctree: t.List[str]): + self.title = title + self.toctree = toctree + + +#: A tuple consisting of a list of sections and a list of RST documents as tuples +#: (relative path in docs/docsite/rst, content). +CollectionExtraDocsInfoT = t.Tuple[t.List[Section], t.List[t.Tuple[str, str]]] + + +def find_extra_docs(path_to_collection: str) -> t.List[t.Tuple[str, str]]: + '''Enumerate all extra docs RST files for a collection path. + + :arg path_to_collection: Path to a collection. + :arg collection_name: Dotted collection name. 
+ :returns: A list of tuples (absolute path, relative path in docs/docsite/rst) + ''' + docs_dir = os.path.join(path_to_collection, 'docs', 'docsite', 'rst') + if not os.path.isdir(docs_dir): + return [] + result = [] + for dirname, dirs, files in os.walk(docs_dir): + for file in files: + if file.endswith('.rst'): + path = os.path.join(dirname, file) + result.append((path, os.path.normpath(os.path.relpath(path, docs_dir)))) + return result + + +def get_label_prefix(collection_name: str) -> str: + '''Create RST label prefix for the given collection name. + + :arg collection_name: Dotted collection name. + :returns: A RST label prefix + ''' + return f'ansible_collections.{collection_name}.docsite.' + + +def lint_required_conditions(content: str, collection_name: str + ) -> t.Tuple[t.List[str], t.List[t.Tuple[int, int, str]]]: + '''Check an extra docs RST file's content for whether it satisfies the required conditions. + + :arg content: Content of a RST document. + :arg collection_name: Dotted collection name. + :returns: A tuple consisting of a list of RST labels and a list of error messages + (with line and column numbers). 
+ ''' + labels: t.Set[str] = set() + errors: t.List[t.Tuple[int, int, str]] = [] + label_prefix = get_label_prefix(collection_name) + # Check label definitions + for row, line in enumerate(content.splitlines()): + m = _RST_LABEL_DEFINITION.match(line) + if m: + label = m.group(1) + if not label.startswith(label_prefix): + errors.append(( + row + 1, + 0, + f'Label "{label}" does not start with expected prefix "{label_prefix}"')) + else: + labels.add(label) + return sorted(labels), errors + + +def load_toctree(yaml_section: t.Dict[str, t.Any], section_index: int = 0 + ) -> t.Tuple[t.List[str], t.List[str]]: + errors: t.List[str] = [] + toctree: t.List[str] = [] + if 'toctree' in yaml_section: + for toctree_index, toctree_name in enumerate(yaml_section['toctree']): + if not isinstance(toctree_name, str): + errors.append( + f'Toctree entry #{toctree_index} in section #{section_index}' + f' is not a string') + continue + toctree.append(toctree_name) + return toctree, errors + + +def load_section(yaml_section: t.Dict[str, t.Any], section_index: int = 0 + ) -> t.Tuple[t.Optional[Section], t.List[str]]: + errors: t.List[str] = [] + missing = False + for required_key in ('title', ): + if required_key not in yaml_section: + errors.append( + f'Section #{section_index} has no "{required_key}" entry') + missing = True + if missing: + return None, errors + toctree, toctree_errors = load_toctree(yaml_section, section_index) + errors.extend(toctree_errors) + if not toctree: + errors.append( + f'Section #{section_index} has no content') + return None, errors + return Section(yaml_section['title'], toctree), errors + + +def load_extra_docs_index(index_path: str) -> t.Tuple[t.List[Section], t.List[str]]: + '''Load a collection's extra-docs.yml file. + + :arg index_path: Path to extra-docs.yml (does not need to exist). + :returns: A tuple consisting of a list of sections and a list of error messages. 
+ :raises: ExtraDocsIndexError if extra-docs.yml does not exist + ''' + sections: t.List[Section] = [] + errors: t.List[str] = [] + + if not os.path.isfile(index_path): + raise ExtraDocsIndexError('extra-docs.yml does not exist') + + try: + index = load_yaml_file(index_path) + if index.get('sections'): + for section_index, yaml_section in enumerate(index['sections']): + if not isinstance(yaml_section, dict): + errors.append(f'Section #{section_index} must be a mapping') + continue + section, section_errors = load_section(yaml_section, section_index) + if section is not None: + sections.append(section) + errors.extend(section_errors) + except Exception as exc: + errors.append(str(exc)) + + return sections, errors + + +async def load_collection_extra_docs(collection_name: str, + collection_path: str, + path_prefix: str = 'docsite/' + ) -> CollectionExtraDocsInfoT: + '''Given a collection name and collection metadata, load extra docs data. + + :arg collection_name: Dotted collection name. + :arg collection_path: Path to the collection. + :arg path_prefix: Prefix to add to relative paths, and toctree entries. + :returns: A tuple consisting of a list of sections and a list of RST documents as tuples + (relative path in docs/docsite/rst, content). 
+ ''' + flog = mlog.fields(func='load_collection_extra_docs') + flog.debug('Enter') + + index_path = os.path.join(collection_path, 'docs', 'docsite', 'extra-docs.yml') + try: + sections, dummy = load_extra_docs_index(index_path) + except ExtraDocsIndexError: + sections = [] + + for section in sections: + for i, toctree in enumerate(section.toctree): + section.toctree[i] = path_prefix + toctree + documents = [] + for doc in find_extra_docs(collection_path): + try: + # Load content + async with aiofiles.open(doc[0], 'r', encoding='utf-8') as f: + content = await f.read() + + # Lint content + dummy, errors = lint_required_conditions(content, collection_name) + + # When no errors were found, add to output + if not errors: + documents.append((path_prefix + doc[1], content)) + except Exception: + pass + + flog.debug('Leave') + return sections, documents + + +async def load_collections_extra_docs(collection_paths: t.Mapping[str, str] + ) -> t.Mapping[str, CollectionExtraDocsInfoT]: + '''Load extra docs data. + + :arg collection_paths: Mapping of collection_name to the collection's path. + :returns: A mapping of collection_name to CollectionExtraDocsInfoT. + ''' + flog = mlog.fields(func='load_collections_extra_docs') + flog.debug('Enter') + + loaders = {} + lib_ctx = app_context.lib_ctx.get() + + async with asyncio_pool.AioPool(size=lib_ctx.thread_max) as pool: + for collection_name, collection_path in collection_paths.items(): + loaders[collection_name] = await pool.spawn( + load_collection_extra_docs(collection_name, collection_path)) + + responses = await asyncio.gather(*loaders.values()) + + # Note: Python dicts have always had a stable order as long as you don't modify the dict. + # So loaders (implicitly, the keys) and responses have a matching order here. 
+ result = dict(zip(loaders, responses)) + + flog.debug('Leave') + return result diff --git a/antsibull/lint_extra_docs.py b/antsibull/lint_extra_docs.py new file mode 100644 index 00000000..337277ec --- /dev/null +++ b/antsibull/lint_extra_docs.py @@ -0,0 +1,81 @@ +# coding: utf-8 +# Author: Felix Fontein +# License: GPLv3+ +# Copyright: Ansible Project, 2021 + +import os +import os.path +import re +import typing as t + +import docutils.utils +import rstcheck + +from .extra_docs import ( + find_extra_docs, + lint_required_conditions, + load_extra_docs_index, + ExtraDocsIndexError, +) +from .yaml import load_yaml_file + + +_RST_LABEL_DEFINITION = re.compile(r'''^\.\. _([^:]+):''') + + +def load_collection_name(path_to_collection: str) -> str: + '''Load collection name (namespace.name) from collection's galaxy.yml.''' + galaxy_yml_path = os.path.join(path_to_collection, 'galaxy.yml') + if not os.path.isfile(galaxy_yml_path): + raise Exception(f'Cannot find file {galaxy_yml_path}') + + galaxy_yml = load_yaml_file(galaxy_yml_path) + collection_name = '{namespace}.{name}'.format(**galaxy_yml) + return collection_name + + +def lint_optional_conditions(content: str, path: str, collection_name: str + ) -> t.List[t.Tuple[int, int, str]]: + '''Check an extra docs RST file's content for whether it satisfies the optional conditions. + + Return a list of errors. 
+ ''' + results = rstcheck.check( + content, filename=path, + report_level=docutils.utils.Reporter.WARNING_LEVEL) + return [(result[0], 0, result[1]) for result in results] + + +def lint_collection_extra_docs_files(path_to_collection: str + ) -> t.List[t.Tuple[str, int, int, str]]: + try: + collection_name = load_collection_name(path_to_collection) + except Exception: + return [( + path_to_collection, 0, 0, 'Cannot identify collection with galaxy.yml at this path')] + result = [] + all_labels = set() + docs = find_extra_docs(path_to_collection) + for doc in docs: + try: + # Load content + with open(doc[0], 'r', encoding='utf-8') as f: + content = f.read() + # Rstcheck + errors = lint_optional_conditions(content, doc[0], collection_name) + result.extend((doc[0], line, col, msg) for (line, col, msg) in errors) + # Lint labels + labels, errors = lint_required_conditions(content, collection_name) + all_labels.update(labels) + result.extend((doc[0], line, col, msg) for (line, col, msg) in errors) + except Exception as e: + result.append((doc[0], 0, 0, str(e))) + index_path = os.path.join(path_to_collection, 'docs', 'docsite', 'extra-docs.yml') + try: + sections, errors = load_extra_docs_index(index_path) + result.extend((index_path, 0, 0, error) for error in errors) + except ExtraDocsIndexError as exc: + if len(docs) > 0: + # Only report the missing index_path as an error if we found documents + result.append((index_path, 0, 0, str(exc))) + return result diff --git a/antsibull/write_docs.py b/antsibull/write_docs.py index d8dd54ea..d5fdd24c 100644 --- a/antsibull/write_docs.py +++ b/antsibull/write_docs.py @@ -16,6 +16,7 @@ from . import app_context from .jinja2.environment import doc_environment from .logging import log +from .extra_docs import CollectionExtraDocsInfoT from .docs_parsing import AnsibleCollectionMetadata @@ -37,6 +38,22 @@ ADD_TOCTREES = True +async def write_file(path: str, content: str) -> None: + """ + Write content to a given path. 
+ + :arg path: Path of the file to write. + :arg content: Content to write into the file. + """ + flog = mlog.fields(func='write_file') + flog.debug('Enter') + + async with aiofiles.open(path, 'w') as f: + await f.write(content) + + flog.debug('Leave') + + async def write_plugin_rst(collection_name: str, collection_meta: AnsibleCollectionMetadata, plugin_short_name: str, plugin_type: str, plugin_record: t.Dict[str, t.Any], nonfatal_errors: t.Sequence[str], @@ -333,7 +350,8 @@ async def write_plugin_lists(collection_name: str, plugin_maps: t.Mapping[str, t.Mapping[str, str]], template: Template, dest_dir: str, - collection_meta: AnsibleCollectionMetadata) -> None: + collection_meta: AnsibleCollectionMetadata, + extra_docs_data: CollectionExtraDocsInfoT) -> None: """ Write an index page for each collection. @@ -343,12 +361,14 @@ async def write_plugin_lists(collection_name: str, :arg template: A template to render the collection index. :arg dest_dir: The destination directory to output the index into. :arg collection_meta: Metadata for the collection. + :arg extra_docs_data: Extra docs data for the collection. """ index_contents = template.render( collection_name=collection_name, plugin_maps=plugin_maps, collection_version=collection_meta.version, - add_toctrees=ADD_TOCTREES) + add_toctrees=ADD_TOCTREES, + extra_docs_sections=extra_docs_data[0]) # This is only safe because we made sure that the top of the directory tree we're writing to # (docs/docsite/rst) is only writable by us. @@ -464,6 +484,7 @@ async def output_plugin_indexes(plugin_info: PluginCollectionInfoT, async def output_indexes(collection_to_plugin_info: CollectionInfoT, dest_dir: str, collection_metadata: t.Mapping[str, AnsibleCollectionMetadata], + extra_docs_data: t.Mapping[str, CollectionExtraDocsInfoT], squash_hierarchy: bool = False, ) -> None: """ @@ -473,6 +494,7 @@ async def output_indexes(collection_to_plugin_info: CollectionInfoT, of plugin_name to short_description. 
:arg dest_dir: The directory to place the documentation in. :arg collection_metadata: Dictionary mapping collection names to collection metadata objects. + :arg extra_docs_data: Dictionary mapping collection names to CollectionExtraDocsInfoT. :arg squash_hierarchy: If set to ``True``, no directory hierarchy will be used. Undefined behavior if documentation for multiple collections are created. @@ -508,7 +530,47 @@ async def output_indexes(collection_to_plugin_info: CollectionInfoT, collection_dir = collection_toplevel writers.append(await pool.spawn( write_plugin_lists(collection_name, plugin_maps, collection_plugins_tmpl, - collection_dir, collection_metadata[collection_name]))) + collection_dir, collection_metadata[collection_name], + extra_docs_data[collection_name]))) + + await asyncio.gather(*writers) + + flog.debug('Leave') + + +async def output_extra_docs(dest_dir: str, + extra_docs_data: t.Mapping[str, CollectionExtraDocsInfoT], + squash_hierarchy: bool = False) -> None: + """ + Write the extra docs files for the collections. + + :arg dest_dir: The directory to place the documentation in. + :arg extra_docs_data: Dictionary mapping collection names to CollectionExtraDocsInfoT. + :arg squash_hierarchy: If set to ``True``, no directory hierarchy will be used. + Undefined behavior if documentation for multiple collections are + created. 
+ """ + flog = mlog.fields(func='output_extra_docs') + flog.debug('Enter') + + writers = [] + lib_ctx = app_context.lib_ctx.get() + + if not squash_hierarchy: + collection_toplevel = os.path.join(dest_dir, 'collections') + else: + collection_toplevel = dest_dir + + async with asyncio_pool.AioPool(size=lib_ctx.thread_max) as pool: + for collection_name, (dummy, documents) in extra_docs_data.items(): + if not squash_hierarchy: + collection_dir = os.path.join(collection_toplevel, *(collection_name.split('.'))) + else: + collection_dir = collection_toplevel + for path, content in documents: + full_path = os.path.join(collection_dir, path) + os.makedirs(os.path.dirname(full_path), mode=0o755, exist_ok=True) + writers.append(await pool.spawn(write_file(full_path, content))) await asyncio.gather(*writers) From 93a70739c465b1878798f679d5e50fe09d8f7791 Mon Sep 17 00:00:00 2001 From: Felix Fontein Date: Mon, 24 May 2021 21:12:39 +0200 Subject: [PATCH 2/2] Do not keep extra docs content. --- antsibull/extra_docs.py | 12 ++++++------ antsibull/write_docs.py | 24 +++++++++++++++--------- 2 files changed, 21 insertions(+), 15 deletions(-) diff --git a/antsibull/extra_docs.py b/antsibull/extra_docs.py index 5ddb8e5d..e0153bd4 100644 --- a/antsibull/extra_docs.py +++ b/antsibull/extra_docs.py @@ -36,7 +36,7 @@ def __init__(self, title: str, toctree: t.List[str]): #: A tuple consisting of a list of sections and a list of RST documents as tuples -#: (relative path in docs/docsite/rst, content). +#: (absolute path to source file, relative path in collection's docs directory). 
CollectionExtraDocsInfoT = t.Tuple[t.List[Section], t.List[t.Tuple[str, str]]] @@ -162,7 +162,7 @@ def load_extra_docs_index(index_path: str) -> t.Tuple[t.List[Section], t.List[st async def load_collection_extra_docs(collection_name: str, collection_path: str, - path_prefix: str = 'docsite/' + path_prefix: str = 'docsite' ) -> CollectionExtraDocsInfoT: '''Given a collection name and collection metadata, load extra docs data. @@ -183,12 +183,12 @@ async def load_collection_extra_docs(collection_name: str, for section in sections: for i, toctree in enumerate(section.toctree): - section.toctree[i] = path_prefix + toctree + section.toctree[i] = f"{path_prefix}/{toctree}" documents = [] - for doc in find_extra_docs(collection_path): + for abs_path, rel_path in find_extra_docs(collection_path): try: # Load content - async with aiofiles.open(doc[0], 'r', encoding='utf-8') as f: + async with aiofiles.open(abs_path, 'r', encoding='utf-8') as f: content = await f.read() # Lint content @@ -196,7 +196,7 @@ async def load_collection_extra_docs(collection_name: str, # When no errors were found, add to output if not errors: - documents.append((path_prefix + doc[1], content)) + documents.append((abs_path, os.path.join(path_prefix, rel_path))) except Exception: pass diff --git a/antsibull/write_docs.py b/antsibull/write_docs.py index d5fdd24c..d836f71e 100644 --- a/antsibull/write_docs.py +++ b/antsibull/write_docs.py @@ -38,17 +38,23 @@ ADD_TOCTREES = True -async def write_file(path: str, content: str) -> None: +async def copy_file(source_path: str, dest_path: str) -> None: """ - Write content to a given path. + Copy content from one file to another. - :arg path: Path of the file to write. - :arg content: Content to write into the file. + Note that this implementation is somewhat naive: it reads the whole content of the source file + and then proceeds to write it to the destination file. + + :arg source_path: Source path. Must be a file. + :arg dest_path: Destination path. 
""" - flog = mlog.fields(func='write_file') + flog = mlog.fields(func='copy_file') flog.debug('Enter') - async with aiofiles.open(path, 'w') as f: + async with aiofiles.open(source_path, 'rb') as f: + content = await f.read() + + async with aiofiles.open(dest_path, 'wb') as f: await f.write(content) flog.debug('Leave') @@ -567,10 +573,10 @@ async def output_extra_docs(dest_dir: str, collection_dir = os.path.join(collection_toplevel, *(collection_name.split('.'))) else: collection_dir = collection_toplevel - for path, content in documents: - full_path = os.path.join(collection_dir, path) + for source_path, rel_path in documents: + full_path = os.path.join(collection_dir, rel_path) os.makedirs(os.path.dirname(full_path), mode=0o755, exist_ok=True) - writers.append(await pool.spawn(write_file(full_path, content))) + writers.append(await pool.spawn(copy_file(source_path, full_path))) await asyncio.gather(*writers)