|
| 1 | +# ----------------------------------------------------------------------------- |
| 2 | +# Copyright (c) Microsoft Corporation. All rights reserved. |
| 3 | +# Licensed under the MIT License. See License.txt in the project root for |
| 4 | +# license information. |
| 5 | +# ----------------------------------------------------------------------------- |
| 6 | + |
| 7 | +from __future__ import print_function |
| 8 | + |
| 9 | +import os |
| 10 | +import sys |
| 11 | +import copy |
| 12 | +import json |
| 13 | +import shutil |
| 14 | +import tempfile |
| 15 | + |
| 16 | +from subprocess import check_call, CalledProcessError |
| 17 | + |
| 18 | +from knack.util import CLIError |
| 19 | +from knack.log import get_logger |
| 20 | + |
| 21 | +from azdev.utilities import ( |
| 22 | + display, heading, subheading, |
| 23 | + get_cli_repo_path, get_path_table |
| 24 | +) |
| 25 | + |
| 26 | +from azdev.utilities.tools import require_azure_cli |
| 27 | +from azure.cli.core.extension.operations import list_available_extensions # pylint: disable=import-error |
| 28 | + |
| 29 | +DOC_MAP_NAME = 'doc_source_map.json' |
| 30 | +HELP_FILE_NAME = '_help.py' |
| 31 | +DOC_SOURCE_MAP_PATH = os.path.join('doc', 'sphinx', 'azhelpgen', DOC_MAP_NAME) |
| 32 | + |
| 33 | +_logger = get_logger(__name__) |
| 34 | + |
| 35 | + |
def check_document_map():
    """Verify that the CLI doc source map agrees with the help files on disk.

    :raises CLIError: listing (a) mapped files missing from disk and
        (b) on-disk help files the map does not mention.
    """
    heading('Verify Document Map')

    cli_repo = get_cli_repo_path()
    map_path = os.path.join(cli_repo, DOC_SOURCE_MAP_PATH)

    mapped_files = _get_help_files_in_map(map_path)
    missing_from_disk = _map_help_files_not_found(cli_repo, mapped_files)
    missing_from_map = _help_files_not_in_map(cli_repo, mapped_files)

    subheading('Results')
    if not (missing_from_disk or missing_from_map):
        display('Verified {} OK.'.format(DOC_MAP_NAME))
        return

    # Accumulate every problem into a single error so the user sees all of
    # them at once instead of fixing one at a time.
    error_lines = ['Errors whilst verifying {}!'.format(DOC_MAP_NAME)]
    if missing_from_disk:
        error_lines.append('The following files are in {} but do not exist:'.format(DOC_MAP_NAME))
        error_lines.extend(missing_from_disk)
    if missing_from_map:
        error_lines.append('The following files should be added to {}:'.format(DOC_MAP_NAME))
        error_lines.extend(missing_from_map)
    raise CLIError('\n'.join(error_lines))
| 60 | + |
| 61 | + |
| 62 | +def _get_help_files_in_map(map_path): |
| 63 | + with open(map_path) as json_file: |
| 64 | + json_data = json.load(json_file) |
| 65 | + return [os.path.normpath(x) for x in list(json_data.values())] |
| 66 | + |
| 67 | + |
| 68 | +def _map_help_files_not_found(cli_repo, help_files_in_map): |
| 69 | + missing_files = [] |
| 70 | + for path in help_files_in_map: |
| 71 | + if not os.path.isfile(os.path.normpath(os.path.join(cli_repo, path))): |
| 72 | + missing_files.append(path) |
| 73 | + return missing_files |
| 74 | + |
| 75 | + |
def _help_files_not_in_map(cli_repo, help_files_in_map):
    """Return on-disk module help files that the doc source map is missing."""
    not_in_map = []
    # Strip the repo prefix so paths compare against the repo-relative
    # entries in the map. NOTE(review): assumes module paths share the
    # lower-cased repo prefix — confirm against get_path_table's output.
    repo_prefix = cli_repo.lower() + os.sep
    for mod_path in get_path_table()['mod'].values():
        help_path = os.path.join(mod_path, HELP_FILE_NAME).replace(repo_prefix, '')
        # Only report files that actually exist and are not already mapped.
        if os.path.isfile(help_path) and help_path not in help_files_in_map:
            not_in_map.append(help_path)
    return not_in_map
| 85 | + |
| 86 | + |
def generate_cli_ref_docs(output_dir=None, output_type=None):
    """Build sphinx reference docs for the core CLI commands.

    :param output_dir: destination directory; a temp directory is created
        when omitted.
    :param output_type: sphinx builder name passed to sphinx-build.
    """
    # Sphinx imports the live CLI command modules, so the CLI must be installed.
    require_azure_cli()
    output_dir = _process_ref_doc_output_dir(output_dir)

    heading('Generate CLI Reference Docs')
    display("Docs will be placed in {}.".format(output_dir))

    # Generate documentation for all commands.
    _call_sphinx_build(output_type, output_dir)

    display("\nThe {} files are in {}".format(output_type, output_dir))
| 99 | + |
| 100 | + |
def generate_extension_ref_docs(output_dir=None, output_type=None):
    """Build sphinx reference docs for all public CLI extensions.

    :param output_dir: destination directory; a temp directory is created
        when omitted.
    :param output_type: sphinx builder name passed to sphinx-build.
    """
    # Sphinx imports the live CLI command modules, so the CLI must be installed.
    require_azure_cli()
    output_dir = _process_ref_doc_output_dir(output_dir)

    heading('Generate CLI Extensions Reference Docs')
    display("Docs will be placed in {}.".format(output_dir))

    display("Generating Docs for public extensions. Installed extensions will not be affected...")
    _generate_ref_docs_for_public_exts(output_type, output_dir)

    display("\nThe {} files are in {}".format(output_type, output_dir))
| 113 | + |
| 114 | + |
| 115 | +def _process_ref_doc_output_dir(output_dir): |
| 116 | + # handle output_dir |
| 117 | + # if non specified, store in "_build" in the current working directory |
| 118 | + if not output_dir: |
| 119 | + output_dir = tempfile.mkdtemp(prefix="doc_output_") |
| 120 | + # ensure output_dir exists otherwise create it |
| 121 | + output_dir = os.path.abspath(output_dir) |
| 122 | + if not os.path.exists(output_dir): |
| 123 | + existing_path = os.path.dirname(output_dir) |
| 124 | + base_dir = os.path.basename(output_dir) |
| 125 | + if not os.path.exists(existing_path): |
| 126 | + raise CLIError("Cannot create output directory {} in non-existent path {}." |
| 127 | + .format(base_dir, existing_path)) |
| 128 | + |
| 129 | + os.mkdir(output_dir) |
| 130 | + return output_dir |
| 131 | + |
| 132 | + |
def _generate_ref_docs_for_public_exts(output_type, base_output_dir):
    """Download, install, and generate reference docs for every compatible
    public extension, without touching the user's installed extensions.

    :param output_type: sphinx builder name passed to sphinx-build.
    :param base_output_dir: root directory; each extension gets a subdirectory.
    :raises CLIError: when the list of public extensions cannot be retrieved.
    """
    # TODO: this shouldn't define the env key, but should reference it from a central place in the cli repo.
    ext_dir_env_key = 'AZURE_EXTENSION_DIR'

    ext_url_tups = _get_available_extension_urls()
    if not ext_url_tups:
        raise CLIError("Failed to retrieve public extensions.")

    whl_dir = tempfile.mkdtemp(prefix="temp_whl_ext_dir")
    _logger.debug("Created temp directory to store downloaded whl files: %s", whl_dir)

    try:
        for ext_name, ext_file_name, ext_download_url in ext_url_tups:
            # Download the wheel for this extension.
            whl_path = _get_whl_from_url(ext_download_url, ext_file_name, whl_dir)

            # Install the wheel into its own temp extension directory so the
            # user's real extension dir is never modified.
            install_dir = tempfile.mkdtemp(prefix="temp_extension_dir_", dir=whl_dir)
            _logger.debug("Created temp directory %s to use as the extension installation dir for %s extension.",
                          install_dir, ext_name)
            pip_cmd = [sys.executable, '-m', 'pip', 'install', '--target',
                       os.path.join(install_dir, 'extension'),
                       whl_path, '--disable-pip-version-check', '--no-cache-dir']
            display('Executing "{}"'.format(' '.join(pip_cmd)))
            check_call(pip_cmd)

            # Point the CLI at the temp extension dir for this sphinx run only.
            build_env = os.environ.copy()
            build_env[ext_dir_env_key] = install_dir

            ext_output_dir = os.path.join(base_output_dir, ext_name)
            os.makedirs(ext_output_dir)
            _call_sphinx_build(output_type, ext_output_dir, for_extensions_alone=True, call_env=build_env,
                               msg="\nGenerating ref docs for {}".format(ext_name))
    finally:
        # Always clean up the downloaded wheels and temp installs.
        shutil.rmtree(whl_dir)
        _logger.debug("Deleted temp whl extension directory: %s", whl_dir)
| 173 | + |
| 174 | + |
def _call_sphinx_build(builder_name, output_dir, for_extensions_alone=False, call_env=None, msg=""):
    """Run sphinx-build to generate reference docs into *output_dir*.

    :param builder_name: sphinx builder to use (passed to ``-b``).
    :param output_dir: directory that receives the generated files.
    :param for_extensions_alone: build from the extension docs source tree
        instead of the core CLI docs tree.
    :param call_env: environment dict for the subprocess (e.g. with a custom
        AZURE_EXTENSION_DIR); None inherits the current environment.
    :param msg: progress message displayed before the build.
    :raises CLIError: when sphinx-build exits with a non-zero status.
    """
    # The sphinx config lives next to this module under 'refdoc'.
    conf_dir = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'refdoc')
    source_subdir = 'extension_docs' if for_extensions_alone else 'cli_docs'
    source_dir = os.path.abspath(os.path.join(conf_dir, source_subdir))

    opts = ['-E', '-b', builder_name, '-c', conf_dir]
    if for_extensions_alone:
        # apparently the configuration in extensions and core CLI differed in this way. This is only cosmetic
        # set smartquotes to false. Due to a bug, one has to use "0" instead "False"
        opts.extend(["-D", "smartquotes=0"])

    sphinx_cmd = ['sphinx-build'] + opts + [source_dir, output_dir]
    display("sphinx cmd: {}".format(" ".join(sphinx_cmd)))
    display(msg)
    try:
        # Keep the try body minimal: only the subprocess call can raise this.
        check_call(sphinx_cmd, stdout=sys.stdout, stderr=sys.stderr, env=call_env)
    except CalledProcessError as err:
        # Chain the original failure so the exit status and command are not
        # lost from the traceback (was a bare re-raise, pylint raise-missing-from).
        raise CLIError("Doc generation failed.") from err
| 199 | + |
| 200 | + |
# Todo, this would be unnecessary if list_available_extensions has a switch for including download urls....
def _get_available_extension_urls():
    """Get download urls for the CLI extensions compatible with the installed development CLI.

    :return: list of 3-tuples in the form of '(extension_name, extension_file_name, extensions_download_url)'
    """
    # The detailed listing has the urls; the plain listing is already
    # filtered down to versions compatible with the installed CLI.
    all_pub_extensions = list_available_extensions(show_details=True)
    compatible_extensions = list_available_extensions()

    name_url_tups = []
    for ext in compatible_extensions:
        ext_name, ext_version = ext["name"], ext["version"]

        # Find the detailed entry whose version matches the compatible one.
        match = next((info for info in all_pub_extensions[ext_name]
                      if info["metadata"]["version"] == ext_version), None)
        if match is None:
            _logger.warning("'%s' has no versions compatible with the installed CLI's version", ext_name)
        else:
            name_url_tups.append((ext_name, match["filename"], match["downloadUrl"]))

    return name_url_tups
| 225 | + |
| 226 | + |
| 227 | +def _get_whl_from_url(url, filename, tmp_dir, whl_cache=None): |
| 228 | + if not whl_cache: |
| 229 | + whl_cache = {} |
| 230 | + if url in whl_cache: |
| 231 | + return whl_cache[url] |
| 232 | + import requests |
| 233 | + r = requests.get(url, stream=True) |
| 234 | + assert r.status_code == 200, "Request to {} failed with {}".format(url, r.status_code) |
| 235 | + ext_file = os.path.join(tmp_dir, filename) |
| 236 | + with open(ext_file, 'wb') as f: |
| 237 | + for chunk in r.iter_content(chunk_size=1024): |
| 238 | + if chunk: # ignore keep-alive new chunks |
| 239 | + f.write(chunk) |
| 240 | + whl_cache[url] = ext_file |
| 241 | + return ext_file |