# Author: Toshio Kuratomi <[email protected]>
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or
# https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later
# SPDX-FileCopyrightText: 2020, Ansible Project
"""Output collection documentation."""

import asyncio
import os
import typing as t

import asyncio_pool  # type: ignore[import]
from antsibull_core import app_context
from antsibull_core.logging import log
from antsibull_core.utils.io import copy_file, write_file
from jinja2 import Template
from packaging.specifiers import SpecifierSet

from ..collection_links import CollectionLinks
from ..docs_parsing import AnsibleCollectionMetadata
from ..extra_docs import CollectionExtraDocsInfoT
from ..jinja2.environment import doc_environment
from ..utils.collection_name_transformer import CollectionNameTransformer
from . import CollectionInfoT, _render_template

mlog = log.fields(mod=__name__)


def _parse_required_ansible(requires_ansible: str) -> t.List[str]:
    result = []
    for specifier in reversed(sorted(
        SpecifierSet(requires_ansible),
        key=lambda specifier: (specifier.operator, specifier.version)
    )):
        if specifier.operator == '>=':
            result.append(f'{specifier.version} or newer')
        elif specifier.operator == '>':
            result.append(f'newer than {specifier.version}')
        elif specifier.operator == '<=':
            result.append(f'{specifier.version} or older')
        elif specifier.operator == '<':
            result.append(f'older than {specifier.version}')
        elif specifier.operator == '!=':
            result.append(f'version {specifier.version} is specifically not supported')
        elif specifier.operator == '==':
            result.append(f'version {specifier.version} is specifically supported')
        else:
            result.append(f'{specifier.operator} {specifier.version}')
    return result
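
# Illustrative example (added note, not part of the original logic): the helper above
# turns a collection's ``requires_ansible`` specifier set (from ``meta/runtime.yml``)
# into human-readable phrases. Tracing the code for a typical value:
#
#     >>> _parse_required_ansible('>=2.9.10,<2.11')
#     ['2.9.10 or newer', 'older than 2.11']
#
# The resulting list is passed to the collection index template as the
# ``requires_ansible`` variable (see write_plugin_lists below).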


async def write_plugin_lists(collection_name: str,
                             plugin_maps: t.Mapping[str, t.Mapping[str, str]],
                             template: Template,
                             dest_dir: str,
                             collection_meta: AnsibleCollectionMetadata,
                             extra_docs_data: CollectionExtraDocsInfoT,
                             link_data: CollectionLinks,
                             breadcrumbs: bool = True,
                             for_official_docsite: bool = False,
                             squash_hierarchy: bool = False) -> None:
    """
    Write the index page for a single collection.

    The per-collection index page links to the collection's plugins.

    :arg collection_name: The dotted name of the collection (for example ``community.general``).
    :arg plugin_maps: Mapping of plugin_type to Mapping of plugin_name to short_description
        (see the illustrative shape sketched in the comment below).
    :arg template: A template to render the collection index.
    :arg dest_dir: The destination directory to output the index into.
    :arg collection_meta: Metadata for the collection.
    :arg extra_docs_data: Extra docs data for the collection.
    :arg link_data: Links for the collection.
    :kwarg breadcrumbs: Default True. Set to False to disable breadcrumbs for collections;
        disabling them reduces memory usage.
    :kwarg for_official_docsite: Default False. Set to True to use wording specific to the
        official docsite on docs.ansible.com.
    :kwarg squash_hierarchy: If set to ``True``, no directory hierarchy will be used.
        Behavior is undefined if documentation for multiple collections is created.
    """
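    # Illustrative shape of ``plugin_maps`` (hypothetical plugin names, not taken from a
    # real run):
    #     {'module': {'some_module': 'Manage some resource', ...},
    #      'lookup': {'some_lookup': 'Look up some value', ...}}
    # i.e. plugin type -> plugin name -> short description.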
    flog = mlog.fields(func='write_plugin_lists')
    flog.debug('Enter')

    requires_ansible = []
    if collection_name != 'ansible.builtin' and collection_meta.requires_ansible:
        try:
            requires_ansible = _parse_required_ansible(collection_meta.requires_ansible)
        except Exception as exc:  # pylint:disable=broad-except
            flog.fields(
                collection_name=collection_name,
                exception=exc,
            ).error(
                'Cannot parse requires_ansible specifier set for {collection_name}',
                collection_name=collection_name,
            )
    index_contents = _render_template(
        template,
        dest_dir,
        collection_name=collection_name,
        plugin_maps=plugin_maps,
        collection_version=collection_meta.version,
        requires_ansible=requires_ansible,
        link_data=link_data,
        breadcrumbs=breadcrumbs,
        extra_docs_sections=extra_docs_data[0],
        collection_authors=link_data.authors,
        collection_description=link_data.description,
        collection_links=link_data.links,
        collection_communication=link_data.communication,
        for_official_docsite=for_official_docsite,
        squash_hierarchy=squash_hierarchy,
    )

    # This is only safe because we made sure that the top of the directory tree we're writing to
    # (docs/docsite/rst) is only writable by us.
    os.makedirs(dest_dir, mode=0o755, exist_ok=True)
    index_file = os.path.join(dest_dir, 'index.rst')

    await write_file(index_file, index_contents)

    flog.debug('Leave')


async def output_indexes(collection_to_plugin_info: CollectionInfoT,
                         dest_dir: str,
                         collection_metadata: t.Mapping[str, AnsibleCollectionMetadata],
                         extra_docs_data: t.Mapping[str, CollectionExtraDocsInfoT],
                         link_data: t.Mapping[str, CollectionLinks],
                         collection_url: CollectionNameTransformer,
                         collection_install: CollectionNameTransformer,
                         squash_hierarchy: bool = False,
                         breadcrumbs: bool = True,
                         for_official_docsite: bool = False) -> None:
    """
    Generate collection-level index pages for the collections.

    :arg collection_to_plugin_info: Mapping of collection_name to Mapping of plugin_type to
        Mapping of plugin_name to short_description.
    :arg dest_dir: The directory to place the documentation in.
    :arg collection_metadata: Dictionary mapping collection names to collection metadata objects.
    :arg extra_docs_data: Dictionary mapping collection names to CollectionExtraDocsInfoT.
    :arg link_data: Dictionary mapping collection names to CollectionLinks.
    :arg collection_url: Transformer for collection URLs (passed through to the template
        environment).
    :arg collection_install: Transformer for collection installation instructions (passed
        through to the template environment).
    :kwarg squash_hierarchy: If set to ``True``, no directory hierarchy will be used.
        Behavior is undefined if documentation for multiple collections is created.
    :kwarg breadcrumbs: Default True. Set to False to disable breadcrumbs for collections;
        disabling them reduces memory usage.
    :kwarg for_official_docsite: Default False. Set to True to use wording specific to the
        official docsite on docs.ansible.com.
    """
    flog = mlog.fields(func='output_indexes')
    flog.debug('Enter')

    if collection_metadata is None:
        collection_metadata = {}

    env = doc_environment(
        ('antsibull_docs.data', 'docsite'),
        collection_url=collection_url,
        collection_install=collection_install)
    # Get the template for the per-collection plugin index
    collection_plugins_tmpl = env.get_template('plugins_by_collection.rst.j2')

    writers = []
    lib_ctx = app_context.lib_ctx.get()

    if not squash_hierarchy:
        collection_toplevel = os.path.join(dest_dir, 'collections')
        flog.fields(toplevel=collection_toplevel, exists=os.path.isdir(collection_toplevel)).debug(
            'collection_toplevel exists?')
        # This is only safe because we made sure that the top of the directory tree we're
        # writing to (docs/docsite/rst) is only writable by us.
        os.makedirs(collection_toplevel, mode=0o755, exist_ok=True)
    else:
        collection_toplevel = dest_dir

    async with asyncio_pool.AioPool(size=lib_ctx.thread_max) as pool:
        for collection_name, plugin_maps in collection_to_plugin_info.items():
            if not squash_hierarchy:
                collection_dir = os.path.join(collection_toplevel, *(collection_name.split('.')))
            else:
                collection_dir = collection_toplevel
            writers.append(await pool.spawn(
                write_plugin_lists(collection_name, plugin_maps, collection_plugins_tmpl,
                                   collection_dir, collection_metadata[collection_name],
                                   extra_docs_data[collection_name],
                                   link_data[collection_name],
                                   breadcrumbs=breadcrumbs,
                                   for_official_docsite=for_official_docsite,
                                   squash_hierarchy=squash_hierarchy)))

    await asyncio.gather(*writers)

    flog.debug('Leave')
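
# Note (added for illustration; derived from the logic above): with the default
# squash_hierarchy=False, the index for a collection such as ``community.general`` is
# written to ``<dest_dir>/collections/community/general/index.rst``; with
# squash_hierarchy=True, everything is written directly into ``dest_dir``.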


async def output_extra_docs(dest_dir: str,
                            extra_docs_data: t.Mapping[str, CollectionExtraDocsInfoT],
                            squash_hierarchy: bool = False) -> None:
    """
    Write extra docs pages for the collections.

    :arg dest_dir: The directory to place the documentation in.
    :arg extra_docs_data: Dictionary mapping collection names to CollectionExtraDocsInfoT.
    :arg squash_hierarchy: If set to ``True``, no directory hierarchy will be used.
        Behavior is undefined if documentation for multiple collections is created.
    """
    flog = mlog.fields(func='output_extra_docs')
    flog.debug('Enter')

    writers = []
    lib_ctx = app_context.lib_ctx.get()

    if not squash_hierarchy:
        collection_toplevel = os.path.join(dest_dir, 'collections')
    else:
        collection_toplevel = dest_dir

    async with asyncio_pool.AioPool(size=lib_ctx.thread_max) as pool:
        for collection_name, (dummy, documents) in extra_docs_data.items():
            if not squash_hierarchy:
                collection_dir = os.path.join(collection_toplevel, *(collection_name.split('.')))
            else:
                collection_dir = collection_toplevel
            for source_path, rel_path in documents:
                full_path = os.path.join(collection_dir, rel_path)
                os.makedirs(os.path.dirname(full_path), mode=0o755, exist_ok=True)
                writers.append(await pool.spawn(copy_file(source_path, full_path)))

    await asyncio.gather(*writers)

    flog.debug('Leave')
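
# Note (added for illustration; derived from the code above): each CollectionExtraDocsInfoT
# value is a pair whose first element holds the extra docs sections (used as
# ``extra_docs_sections`` in write_plugin_lists) and whose second element is an iterable of
# ``(source_path, rel_path)`` pairs; output_extra_docs copies every ``source_path`` to
# ``<collection_dir>/<rel_path>``.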