From 36efb987e1e9d763ba727da6f8ff86d76a433441 Mon Sep 17 00:00:00 2001
From: Jay Aluru
Date: Sun, 3 Nov 2024 16:54:02 -0800
Subject: [PATCH 1/2] Removing msrestazure from AZFW

---
 src/azure-firewall/HISTORY.rst                |   4 +
 .../azext_firewall/_exception_handler.py      |   4 +-
 .../azext_firewall/_validators.py             |  14 +-
 src/azure-firewall/azext_firewall/custom.py   |   2 +-
 src/scripts/automation/build_package.py       |  34 ++
 src/scripts/ci/avail-ext-doc/README.md        |   3 +
 src/scripts/ci/avail-ext-doc/list-template.md |  28 ++
 src/scripts/ci/avail-ext-doc/requirements.txt |   3 +
 .../ci/avail-ext-doc/update_extension_list.py |  81 ++++
 src/scripts/ci/azdev_linter_style.py          | 293 ++++++++++++++
 src/scripts/ci/breaking_change_test.py        | 208 ++++++++++
 src/scripts/ci/build_ext_cmd_tree.sh          |  43 ++
 src/scripts/ci/codegen_cal.py                 |  96 +++++
 .../ci/credscan/CredScanSuppressions.json     | 299 ++++++++++++++
 src/scripts/ci/index_ref_doc.py               |  84 ++++
 src/scripts/ci/release_version_cal.py         | 378 ++++++++++++++++++
 src/scripts/ci/service_name.py                | 127 ++++++
 src/scripts/ci/sync_extensions.py             | 208 ++++++++++
 src/scripts/ci/sync_extensions.sh             |   8 +
 src/scripts/ci/test_index.py                  | 217 ++++++++++
 src/scripts/ci/test_index_ref_doc.sh          |  13 +
 src/scripts/ci/test_init.py                   |  65 +++
 src/scripts/ci/test_source.py                 | 107 +++++
 src/scripts/ci/update_ext_cmd_tree.py         | 112 ++++++
 src/scripts/ci/update_index.py                |  68 ++++
 src/scripts/ci/util.py                        | 165 ++++++++
 src/scripts/ci/verify_codeowners.py           |  42 ++
 src/scripts/refdoc/README.md                  |  26 ++
 src/scripts/refdoc/azhelpgen/__init__.py      |   4 +
 src/scripts/refdoc/azhelpgen/azhelpgen.py     | 190 +++++++++
 src/scripts/refdoc/cligroup/__init__.py       |   4 +
 src/scripts/refdoc/cligroup/cligroup.py       |  73 ++++
 src/scripts/refdoc/conf.py                    |  42 ++
 src/scripts/refdoc/generate.py                |  80 ++++
 src/scripts/refdoc/ind.rst                    |   2 +
 src/scripts/run_az.py                         |  10 +
 36 files changed, 3127 insertions(+), 10 deletions(-)
 create mode 100644 src/scripts/automation/build_package.py
 create mode 100644 src/scripts/ci/avail-ext-doc/README.md
 create mode 100644 src/scripts/ci/avail-ext-doc/list-template.md
 create mode 100644 src/scripts/ci/avail-ext-doc/requirements.txt
 create mode 100644 src/scripts/ci/avail-ext-doc/update_extension_list.py
 create mode 100644 src/scripts/ci/azdev_linter_style.py
 create mode 100644 src/scripts/ci/breaking_change_test.py
 create mode 100644 src/scripts/ci/build_ext_cmd_tree.sh
 create mode 100644 src/scripts/ci/codegen_cal.py
 create mode 100644 src/scripts/ci/credscan/CredScanSuppressions.json
 create mode 100644 src/scripts/ci/index_ref_doc.py
 create mode 100644 src/scripts/ci/release_version_cal.py
 create mode 100644 src/scripts/ci/service_name.py
 create mode 100644 src/scripts/ci/sync_extensions.py
 create mode 100644 src/scripts/ci/sync_extensions.sh
 create mode 100644 src/scripts/ci/test_index.py
 create mode 100644 src/scripts/ci/test_index_ref_doc.sh
 create mode 100644 src/scripts/ci/test_init.py
 create mode 100644 src/scripts/ci/test_source.py
 create mode 100644 src/scripts/ci/update_ext_cmd_tree.py
 create mode 100644 src/scripts/ci/update_index.py
 create mode 100644 src/scripts/ci/util.py
 create mode 100644 src/scripts/ci/verify_codeowners.py
 create mode 100644 src/scripts/refdoc/README.md
 create mode 100644 src/scripts/refdoc/azhelpgen/__init__.py
 create mode 100644 src/scripts/refdoc/azhelpgen/azhelpgen.py
 create mode 100644 src/scripts/refdoc/cligroup/__init__.py
 create mode 100644 src/scripts/refdoc/cligroup/cligroup.py
 create mode 100644 src/scripts/refdoc/conf.py
 create mode 100644 src/scripts/refdoc/generate.py
 create mode 100644 src/scripts/refdoc/ind.rst
 create mode 100644 src/scripts/run_az.py
diff --git a/src/azure-firewall/HISTORY.rst b/src/azure-firewall/HISTORY.rst
index 7e1e9d89065..1d7c71e8acd 100644
--- a/src/azure-firewall/HISTORY.rst
+++ b/src/azure-firewall/HISTORY.rst
@@ -2,6 +2,10 @@
 Release History
 ===============
 
+1.2.1
+++++++
+* Remove msrestazure dependency
+
 1.2.0
 ++++++
 * `network firewall policy rule-collection-group collection rule add/update"`: Add parameter `--http-headers-to-insert`

diff --git a/src/azure-firewall/azext_firewall/_exception_handler.py b/src/azure-firewall/azext_firewall/_exception_handler.py
index fcd2e326e25..d89dc9e4bc0 100644
--- a/src/azure-firewall/azext_firewall/_exception_handler.py
+++ b/src/azure-firewall/azext_firewall/_exception_handler.py
@@ -5,8 +5,8 @@


 def exception_handler(ex):
-    from msrestazure.azure_exceptions import CloudError
-    if isinstance(ex, CloudError):
+    from azure.core.exceptions import HttpResponseError
+    if isinstance(ex, HttpResponseError):
         text = getattr(ex.response, 'text', '')
         if len(ex.args) == 1 and isinstance(ex.args[0], str):
             ex.args = tuple([ex.args[0] + text])
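[Editor's note, not part of the patch] `azure.core.exceptions.HttpResponseError` is the azure-core (Track 2) replacement for `msrestazure`'s `CloudError`, and it likewise exposes the failed HTTP `response` object, so the handler's attribute access keeps working unchanged. A minimal sketch of the same pattern, exercised with a synthetic error instead of a real service call:

    # Sketch only: demonstrates the attributes the updated handler relies on.
    from azure.core.exceptions import HttpResponseError

    try:
        raise HttpResponseError(message="(AuthorizationFailed) access denied")
    except HttpResponseError as ex:
        text = getattr(ex.response, 'text', '')  # ex.response is None here, so '' is returned
        if len(ex.args) == 1 and isinstance(ex.args[0], str):
            ex.args = tuple([ex.args[0] + text])
        print(ex.args[0])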
""" - from msrestazure.tools import is_valid_resource_id, resource_id + from azure.mgmt.core.tools import is_valid_resource_id, resource_id def simple_validator(cmd, namespace): if namespace.management_public_ip_address: @@ -156,7 +156,7 @@ def _validate_name_or_id(public_ip): def validate_firewall_policy(cmd, namespace): - from msrestazure.tools import is_valid_resource_id, resource_id + from azure.mgmt.core.tools import is_valid_resource_id, resource_id if hasattr(namespace, 'base_policy') and namespace.base_policy is not None: if not is_valid_resource_id(namespace.base_policy): @@ -178,7 +178,7 @@ def validate_firewall_policy(cmd, namespace): def validate_virtual_hub(cmd, namespace): - from msrestazure.tools import is_valid_resource_id, resource_id + from azure.mgmt.core.tools import is_valid_resource_id, resource_id if hasattr(namespace, 'virtual_hub') and namespace.virtual_hub is not None: diff --git a/src/azure-firewall/azext_firewall/custom.py b/src/azure-firewall/azext_firewall/custom.py index bea8fcb87d1..e740d7baf56 100644 --- a/src/azure-firewall/azext_firewall/custom.py +++ b/src/azure-firewall/azext_firewall/custom.py @@ -13,7 +13,7 @@ from azure.cli.core.util import sdk_no_wait from azure.cli.core.azclierror import UserFault, ServiceError, ValidationError, ArgumentUsageError from azure.cli.core.commands.client_factory import get_subscription_id -from msrestazure.tools import resource_id +from azure.mgmt.core.tools import resource_id from ._client_factory import network_client_factory from .aaz.latest.network.firewall import Create as _AzureFirewallCreate, Update as _AzureFirewallUpdate, \ Show as _AzureFirewallShow diff --git a/src/scripts/automation/build_package.py b/src/scripts/automation/build_package.py new file mode 100644 index 00000000000..7e72d0c7360 --- /dev/null +++ b/src/scripts/automation/build_package.py @@ -0,0 +1,34 @@ +#!/usr/bin/env python + +#------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +#-------------------------------------------------------------------------- + +import argparse +import os +import glob +from subprocess import check_call + +DEFAULT_DEST_FOLDER = "./dist" + +def create_package(name, dest_folder=DEFAULT_DEST_FOLDER): + # a package will exist in either one, or the other folder. this is why we can resolve both at the same time. + absdirs = [os.path.dirname(package) for package in (glob.glob('{}/setup.py'.format(name)) + glob.glob('sdk/*/{}/setup.py'.format(name)))] + absdirpath = os.path.abspath(absdirs[0]) + check_call(['python', 'setup.py', 'bdist_wheel', '-d', dest_folder], cwd=absdirpath) + check_call(['python', 'setup.py', "sdist", "--format", "zip", '-d', dest_folder], cwd=absdirpath) + +if __name__ == '__main__': + """ + This file is used for Swagger CLI extension automation to build the wheel file and zip file + """ + parser = argparse.ArgumentParser(description='Build Azure package.') + parser.add_argument('name', help='The package name') + parser.add_argument('--dest', '-d', default=DEFAULT_DEST_FOLDER, + help='Destination folder. Relative to the package dir. 
diff --git a/src/scripts/automation/build_package.py b/src/scripts/automation/build_package.py
new file mode 100644
index 00000000000..7e72d0c7360
--- /dev/null
+++ b/src/scripts/automation/build_package.py
@@ -0,0 +1,34 @@
+#!/usr/bin/env python
+
+#-------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#--------------------------------------------------------------------------
+
+import argparse
+import os
+import glob
+from subprocess import check_call
+
+DEFAULT_DEST_FOLDER = "./dist"
+
+def create_package(name, dest_folder=DEFAULT_DEST_FOLDER):
+    # A package's setup.py lives in either the top-level folder or under sdk/*/,
+    # which is why both globs can be resolved at the same time.
+    absdirs = [os.path.dirname(package) for package in (glob.glob('{}/setup.py'.format(name)) + glob.glob('sdk/*/{}/setup.py'.format(name)))]
+    absdirpath = os.path.abspath(absdirs[0])
+    check_call(['python', 'setup.py', 'bdist_wheel', '-d', dest_folder], cwd=absdirpath)
+    check_call(['python', 'setup.py', "sdist", "--format", "zip", '-d', dest_folder], cwd=absdirpath)
+
+if __name__ == '__main__':
+    """
+    This script is used by the Swagger CLI extension automation to build the wheel file and zip file
+    """
+    parser = argparse.ArgumentParser(description='Build Azure package.')
+    parser.add_argument('name', help='The package name')
+    parser.add_argument('--dest', '-d', default=DEFAULT_DEST_FOLDER,
+                        help='Destination folder. Relative to the package dir. [default: %(default)s]')
+    args = parser.parse_args()
+    create_package(args.name, args.dest)
+
+
diff --git a/src/scripts/ci/avail-ext-doc/README.md b/src/scripts/ci/avail-ext-doc/README.md
new file mode 100644
index 00000000000..34066002d02
--- /dev/null
+++ b/src/scripts/ci/avail-ext-doc/README.md
@@ -0,0 +1,3 @@
+This script is used in a pipeline named **Azure CLI Extensions Sync** in Azure DevOps.
+
+It syncs the list of available extensions to Microsoft/azure-cli-docs.

diff --git a/src/scripts/ci/avail-ext-doc/list-template.md b/src/scripts/ci/avail-ext-doc/list-template.md
new file mode 100644
index 00000000000..2738e5d4576
--- /dev/null
+++ b/src/scripts/ci/avail-ext-doc/list-template.md
@@ -0,0 +1,28 @@
+---
+title: List of available Azure CLI extensions | Microsoft Docs
+description: A complete list of officially supported Azure Command-Line Interface (CLI) extensions that are provided and maintained by Microsoft.
+author: haroldrandom
+ms.author: jianzen
+manager: yonzhan,yungezz
+ms.date: {{ date }}
+ms.topic: article
+ms.service: azure-cli
+ms.devlang: azure-cli
+ms.tool: azure-cli
+ms.custom: devx-track-azurecli
+keywords: az extension, azure cli extensions, azure extensions
+---
+
+# Available Azure CLI extensions
+
+This article is a complete list of the available extensions for the Azure CLI that are supported by Microsoft. The list of extensions is also available from the CLI. To get it, run [az extension list-available](/cli/azure/extension#az-extension-list-available):
+
+```azurecli-interactive
+az extension list-available --output table
+```
+
+You will be prompted to install an extension on first use.
+
+| Extension | Required Minimum CLI Version | Description | Status | Release Notes |
+|----|-----------------|-------------|---------|---------------|{% for extension in extensions %}
+|[{{ extension.name }}]({{ extension.project_url }}) | {{ extension.min_cli_core_version }} | {{ extension.desc }} | {{ extension.status }} | [{{extension.version}}]({{extension.history}}) |{% endfor %}

diff --git a/src/scripts/ci/avail-ext-doc/requirements.txt b/src/scripts/ci/avail-ext-doc/requirements.txt
new file mode 100644
index 00000000000..e722f2a852b
--- /dev/null
+++ b/src/scripts/ci/avail-ext-doc/requirements.txt
@@ -0,0 +1,3 @@
+Jinja2==3.0.3
+requests
+wheel==0.31.1

diff --git a/src/scripts/ci/avail-ext-doc/update_extension_list.py b/src/scripts/ci/avail-ext-doc/update_extension_list.py
new file mode 100644
index 00000000000..5eeb3701e63
--- /dev/null
+++ b/src/scripts/ci/avail-ext-doc/update_extension_list.py
@@ -0,0 +1,81 @@
+
+# --------------------------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# --------------------------------------------------------------------------------------------
+
+"""
+This script must be run at the root of the repo folder, which is azure-cli-extensions/
+It is used to update the file "azure-cli-extensions-list.md" in MicrosoftDocs/azure-cli-docs.
+The file content is the list of the latest available extensions.
+"""
+
+import os
+import sys
+
+import collections
+import datetime
+from pkg_resources import parse_version
+
+from jinja2 import Template  # pylint: disable=import-error
+import requests
+
+# After the migration to OneBranch, cloning both the azure-cli-extensions repo and the azure-docs-cli repo is required.
+# Also standardizes the directory structure: +# - $(System.DefaultWorkingDirectory) +# - azure-cli-extensions +# - azure-docs-cli +AZURE_CLI_EXTENSIONS_REPO_PATH = os.path.abspath(os.path.join('.', 'azure-cli-extensions')) +AZURE_DOCS_CLI_REPO_PATH = os.path.abspath(os.path.join('.', 'azure-docs-cli')) +AVAILABLE_EXTENSIONS_DOC = os.path.join(AZURE_DOCS_CLI_REPO_PATH, 'docs-ref-conceptual', 'azure-cli-extensions-list.md') +TEMPLATE_FILE = os.path.join(AZURE_CLI_EXTENSIONS_REPO_PATH, 'scripts', 'ci', 'avail-ext-doc', 'list-template.md') + +sys.path.insert(0, os.path.join(AZURE_CLI_EXTENSIONS_REPO_PATH, 'scripts')) +from ci.util import get_index_data, INDEX_PATH + + +def get_extensions(): + extensions = [] + index_extensions = collections.OrderedDict(sorted(get_index_data()['extensions'].items())) + for _, exts in index_extensions.items(): + # Get latest version + exts = sorted(exts, key=lambda c: parse_version(c['metadata']['version']), reverse=True) + + # some extension modules may not include 'HISTORY.rst' + project_url = exts[0]['metadata']['extensions']['python.details']['project_urls']['Home'] + history_tmp = project_url + '/HISTORY.rst' + history = project_url if str(requests.get(history_tmp).status_code) == '404' else history_tmp + if exts[0]['metadata'].get('azext.isPreview'): + status = 'Preview' + elif exts[0]['metadata'].get('azext.isExperimental'): + status = 'Experimental' + else: + status = 'GA' + + extensions.append({ + 'name': exts[0]['metadata']['name'], + 'desc': exts[0]['metadata']['summary'], + 'min_cli_core_version': exts[0]['metadata']['azext.minCliCoreVersion'], + 'version': exts[0]['metadata']['version'], + 'project_url': project_url, + 'history': history, + 'status': status + }) + return extensions + + +def update_extensions_list(output_file): + with open(TEMPLATE_FILE, 'r') as doc_template: + template = Template(doc_template.read()) + if template is None: + raise RuntimeError("Failed to read template file {}".format(TEMPLATE_FILE)) + with open(output_file, 'w') as output: + output.write(template.render(extensions=get_extensions(), date=datetime.date.today().strftime("%m/%d/%Y"))) + + +def main(): + update_extensions_list(AVAILABLE_EXTENSIONS_DOC) + + +if __name__ == '__main__': + main() diff --git a/src/scripts/ci/azdev_linter_style.py b/src/scripts/ci/azdev_linter_style.py new file mode 100644 index 00000000000..6b08712ec2d --- /dev/null +++ b/src/scripts/ci/azdev_linter_style.py @@ -0,0 +1,293 @@ +# -------------------------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# -------------------------------------------------------------------------------------------- + +""" +This script is used to run azdev linter and azdev style on extensions. + +It's only working on ADO by default. 
If you want to run it locally, please update the target branch/commit used to find the diff in find_modified_files_against_master_branch().
+"""
+import json
+import logging
+import os
+import re
+import shutil
+from subprocess import CalledProcessError, check_call, check_output
+
+import service_name
+from pkg_resources import parse_version
+from util import get_ext_metadata
+
+logger = logging.getLogger(__name__)
+logger.setLevel(logging.DEBUG)
+ch = logging.StreamHandler()
+ch.setLevel(logging.DEBUG)
+logger.addHandler(ch)
+
+
+def separator_line():
+    logger.info('-' * 100)
+
+
+class ModifiedFilesNotAllowedError(Exception):
+    """
+    Exception raised when the set of modified files conflicts with the publishing requirements.
+    Scenario 1: if modified files contain only src/index.json, don't raise.
+    Scenario 2: if modified files contain not only extension code but also src/index.json, raise.
+    Scenario 3: if modified files don't contain src/index.json, don't raise.
+    """
+
+    def __str__(self):
+        msg = """
+        ---------------------------------------------------------------------------------------------------------
+        You have modified both source code and src/index.json!
+
+        There is a release pipeline that will help you build, upload and publish your extension.
+        Once your PR is merged into the master branch, a new PR will be created to update src/index.json automatically.
+
+        If you want us to help build, upload and publish your extension, src/index.json must not be modified.
+        ---------------------------------------------------------------------------------------------------------
+        """
+        return msg
+
+
+class AzExtensionHelper:
+    def __init__(self, extension_name):
+        self.extension_name = extension_name
+
+    @staticmethod
+    def _cmd(cmd):
+        logger.info(cmd)
+        check_call(cmd, shell=True)
+
+    def add_from_url(self, url):
+        self._cmd('az extension add -s {} -y'.format(url))
+
+    def remove(self):
+        self._cmd('az extension remove -n {}'.format(self.extension_name))
+
+
+class AzdevExtensionHelper:
+    def __init__(self, extension_name):
+        self.extension_name = extension_name
+
+    @staticmethod
+    def _cmd(cmd):
+        logger.info(cmd)
+        check_call(cmd, shell=True)
+
+    def add_from_code(self):
+        self._cmd('azdev extension add {}'.format(self.extension_name))
+
+    def remove(self):
+        self._cmd('azdev extension remove {}'.format(self.extension_name))
+
+    def linter(self):
+        self._cmd('azdev linter --include-whl-extensions {}'.format(self.extension_name))
+
+    def style(self):
+        self._cmd('azdev style {}'.format(self.extension_name))
+
+    def build(self):
+        self._cmd('azdev extension build {}'.format(self.extension_name))
+
+    def check_extension_name(self):
+        extension_root_dir_name = self.extension_name
+        original_cwd = os.getcwd()
+        dist_dir = os.path.join(original_cwd, 'dist')
+        files = os.listdir(dist_dir)
+        logger.info(f"wheel files in the dist directory: {files}")
+        for f in files:
+            if f.endswith('.whl'):
+                NAME_REGEX = r'(.*)-\d+\.\d+\.\d+'
+                extension_name = re.findall(NAME_REGEX, f)[0]
+                extension_name = extension_name.replace('_', '-')
+                logger.info(f"extension name is: {extension_name}")
+                ext_file = os.path.join(dist_dir, f)
+                break
+        metadata = get_ext_metadata(dist_dir, ext_file, extension_name)
+        pretty_metadata = json.dumps(metadata, indent=2)
+        logger.info(f"metadata in the wheel file is: {pretty_metadata}")
+        shutil.rmtree(dist_dir)
+        if '_' in extension_root_dir_name:
+            raise ValueError("Underscores `_` are not allowed in the extension root directory, "
+                             "please change them to hyphens `-`.")
+        if metadata['name'] != extension_name:
+            raise ValueError(f"The name {metadata['name']} in setup.py "
+                             f"is not the same as the extension name {extension_name}! \n"
+                             f"Please fix the name in setup.py!")
+
+
+def find_modified_files_against_master_branch():
+    """
+    Find modified files from src/ only.
+    A: Added, C: Copied, M: Modified, R: Renamed, T: File type changed.
+    Deleted files don't count in diff.
+    """
+    ado_pr_target_branch = 'origin/' + os.environ.get('ADO_PULL_REQUEST_TARGET_BRANCH')
+
+    separator_line()
+    logger.info('pull request target branch: %s', ado_pr_target_branch)
+
+    cmd = 'git --no-pager diff --name-only --diff-filter=ACMRT {} -- src/'.format(ado_pr_target_branch)
+    files = check_output(cmd.split()).decode('utf-8').split('\n')
+    files = [f for f in files if len(f) > 0]
+
+    if files:
+        logger.info('modified files:')
+        separator_line()
+        for f in files:
+            logger.info(f)
+
+    return files
+
+
+def contain_index_json(files):
+    return 'src/index.json' in files
+
+
+def contain_extension_code(files):
+    with open('src/index.json', 'r') as fd:
+        current_extensions = json.loads(fd.read()).get("extensions")
+
+    current_extension_homes = set('src/{}'.format(name) for name in current_extensions)
+
+    for file in files:
+        if any([file.startswith(prefix) for prefix in current_extension_homes]):
+            return True
+
+    # for newly added extensions
+    for file in files:
+        if 'src/' in file and os.path.isfile(file) and os.path.isdir(os.path.dirname(file)):
+            new_extension_home = os.path.dirname(file)
+
+            if os.path.isfile(os.path.join(new_extension_home, 'setup.py')):
+                return True
+
+    return False
+
+
+def azdev_on_external_extension(index_json, azdev_type):
+    """
+    Check if the modified metadata items in index.json refer to an extension in this repo.
+    If not, run the az extension checks on the wheel; otherwise skip it.
+ """ + + public_extensions = json.loads(check_output('az extension list-available -d', shell=True)) + + with open(index_json, 'r') as fd: + current_extensions = json.loads(fd.read()).get("extensions") + + for name in current_extensions: + modified_entries = [entry for entry in current_extensions[name] if entry not in public_extensions.get(name, [])] + + if not modified_entries: + continue + + # check if source code exists, if so, skip + if os.path.isdir('src/{}'.format(name)): + continue + + separator_line() + + latest_entry = max(modified_entries, key=lambda c: parse_version(c['metadata']['version'])) + + az_extension = AzExtensionHelper(name) + az_extension.add_from_url(latest_entry['downloadUrl']) + + azdev_extension = AzdevExtensionHelper(name) + if azdev_type in ['all', 'linter']: + azdev_extension.linter() + # TODO: + # azdev style support external extension + # azdev test support external extension + # azdev_extension.style() + + logger.info('Checking service name for external extensions') + service_name.check() + + az_extension.remove() + + +def azdev_on_internal_extension(modified_files, azdev_type): + extension_names = set() + + for f in modified_files: + src, name, *_ = f.split('/') + if os.path.isdir(os.path.join(src, name)): + extension_names.add(name) + + if not extension_names: + separator_line() + logger.info('no extension source code modified, no extension needs to be checked') + + for name in extension_names: + separator_line() + + azdev_extension = AzdevExtensionHelper(name) + azdev_extension.add_from_code() + if azdev_type in ['all', 'linter']: + azdev_extension.linter() + azdev_extension.build() + azdev_extension.check_extension_name() + if azdev_type in ['all', 'style']: + try: + azdev_extension.style() + except CalledProcessError as e: + statement_msg = """ + ------------------- Please note ------------------- + This task does not block the PR merge. + And it is recommended if you want to create a separate PR to fix these style issues. + CLI will modify it to force block PR merge on 2025. + ---------------------- Thanks ---------------------- + """ + logger.error(statement_msg) + exit(1) + + logger.info('Checking service name for internal extensions') + service_name.check() + + azdev_extension.remove() + + +def main(): + import argparse + parser = argparse.ArgumentParser(description='azdev linter and azdev style on modified extensions') + parser.add_argument('--type', + type=str, + help='Control whether azdev linter, azdev style, azdev test needs to be run. ' + 'Supported values: linter, style, test, all, all is the default.', default='all') + args = parser.parse_args() + azdev_type = args.type + logger.info('azdev type: %s', azdev_type) + modified_files = find_modified_files_against_master_branch() + + if len(modified_files) == 1 and contain_index_json(modified_files): + # Scenario 1. + # This scenarios is for modify index.json only. + # If the modified metadata items refer to the extension code exits in this repo, PR is be created via Pipeline. + # If the modified metadata items refer to the extension code doesn't exist, PR is created from Service Team. + # We try to run azdev linter and azdev style on it. + azdev_on_external_extension(modified_files[0], azdev_type) + else: + # modified files contain more than one file + + if contain_extension_code(modified_files): + # Scenario 2, we reject. 
+            if contain_index_json(modified_files):
+                raise ModifiedFilesNotAllowedError()
+
+            azdev_on_internal_extension(modified_files, azdev_type)
+        else:
+            separator_line()
+            logger.info('no extension source code modified, no extension needs to be checked')
+            separator_line()
+
+
+if __name__ == '__main__':
+    try:
+        main()
+    except ModifiedFilesNotAllowedError as e:
+        logger.error(e)
+        exit(1)

diff --git a/src/scripts/ci/breaking_change_test.py b/src/scripts/ci/breaking_change_test.py
new file mode 100644
index 00000000000..027fb6d3399
--- /dev/null
+++ b/src/scripts/ci/breaking_change_test.py
@@ -0,0 +1,208 @@
+#!/usr/bin/env python
+
+# --------------------------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# --------------------------------------------------------------------------------------------
+
+import json
+import logging
+import os
+import subprocess
+import sys
+
+from util import diff_code
+
+from azdev.utilities.path import get_cli_repo_path, get_ext_repo_paths
+from subprocess import run
+
+logger = logging.getLogger(__name__)
+logger.setLevel(logging.DEBUG)
+ch = logging.StreamHandler()
+ch.setLevel(logging.DEBUG)
+logger.addHandler(ch)
+
+pull_request_number = os.environ.get('PULL_REQUEST_NUMBER', None)
+job_name = os.environ.get('JOB_NAME', None)
+azdev_test_result_dir = os.path.expanduser("~/.azdev/env_config/mnt/vss/_work/1/s/env")
+src_branch = os.environ.get('PR_TARGET_BRANCH', None)
+target_branch = 'merged_pr'
+base_meta_path = '~/_work/1/base_meta'
+diff_meta_path = '~/_work/1/diff_meta'
+output_path = '~/_work/1/output_meta'
+
+
+def install_extensions(diff_ref, branch):
+    for tname, ext_path in diff_ref:
+        ext_name = ext_path.split('/')[-1]
+        logger.info(f'installing extension: {ext_name}')
+        cmd = ['azdev', 'extension', 'add', ext_name]
+        logger.info(f'cmd: {cmd}')
+        out = run(cmd, capture_output=True, text=True)
+        if out.returncode and branch == 'base' and 'ERROR: extension(s) not found' in out.stderr:
+            print(f"{cmd} failed; extension {ext_name} does not exist on the base branch, skipping it.")
+            sys.exit(0)
+        elif out.returncode:
+            raise RuntimeError(f"{cmd} failed")
+
+
+def uninstall_extensions(diff_ref):
+    for tname, ext_path in diff_ref:
+        ext_name = ext_path.split('/')[-1]
+        logger.info(f'uninstalling extension: {ext_name}')
+        cmd = ['azdev', 'extension', 'remove', ext_name]
+        logger.info(f'cmd: {cmd}')
+        out = run(cmd)
+        if out.returncode:
+            raise RuntimeError(f"{cmd} failed")
+
+
+def get_diff_meta_files(diff_ref):
+    cmd = ['git', 'checkout', '-b', target_branch]
+    print(cmd)
+    subprocess.run(cmd)
+    cmd = ['git', 'checkout', src_branch]
+    print(cmd)
+    subprocess.run(cmd)
+    cmd = ['git', 'checkout', target_branch]
+    print(cmd)
+    subprocess.run(cmd)
+    cmd = ['git', 'rev-parse', 'HEAD']
+    print(cmd)
+    subprocess.run(cmd)
+    install_extensions(diff_ref, branch='target')
+    cmd = ['azdev', 'command-change', 'meta-export', '--src', src_branch, '--tgt', target_branch, '--repo', get_ext_repo_paths()[0], '--meta-output-path', diff_meta_path]
+    print(cmd)
+    subprocess.run(cmd)
+    cmd = ['ls', '-al', diff_meta_path]
+    print(cmd)
+    subprocess.run(cmd)
+    uninstall_extensions(diff_ref)
+
+
+def get_base_meta_files(diff_ref):
+    cmd = ['git', 'checkout', src_branch]
+    print(cmd)
+    subprocess.run(cmd)
+    cmd = ['git', 'rev-parse', 'HEAD']
+    print(cmd)
+    subprocess.run(cmd)
+    install_extensions(diff_ref,
branch='base') + cmd = ['azdev', 'command-change', 'meta-export', 'EXT', '--meta-output-path', base_meta_path] + print(cmd) + subprocess.run(cmd) + cmd = ['ls', '-al', base_meta_path] + print(cmd) + subprocess.run(cmd) + + +def meta_diff(only_break=False): + if os.path.exists(diff_meta_path): + for file in os.listdir(diff_meta_path): + if file.endswith('.json'): + cmd = ['azdev', 'command-change', 'meta-diff', '--base-meta-file', os.path.join(base_meta_path, file), '--diff-meta-file', os.path.join(diff_meta_path, file), '--output-file', os.path.join(output_path, file)] + if only_break: + cmd.append('--only-break') + print(cmd) + subprocess.run(cmd) + cmd = ['ls', '-al', output_path] + print(cmd) + subprocess.run(cmd) + + +def get_pipeline_result(only_break=False): + pipeline_result = { + "breaking_change_test": { + "Details": [ + { + "TestName": "Azure CLI Extensions Breaking Change Test", + "Details": [] + } + ] + } + } + if pull_request_number != '$(System.PullRequest.PullRequestNumber)': + pipeline_result['pull_request_number'] = pull_request_number + if os.path.exists(output_path): + for file in os.listdir(output_path): + # skip empty file + if not os.path.getsize(os.path.join(output_path, file)): + continue + with open(os.path.join(output_path, file), 'r') as f: + items = json.load(f) + module = os.path.basename(file).split('.')[0].split('_')[1] + breaking_change = { + "Module": module, + "Status": "", + "Content": "" + } + status = 'Warning' + sorted_items = sorted(items, key=sort_by_content) + for item in sorted_items: + if item['is_break']: + status = 'Failed' + breaking_change['Content'] = build_markdown_content(item, breaking_change['Content']) + breaking_change['Status'] = status + pipeline_result['breaking_change_test']['Details'][0]['Details'].append(breaking_change) + if not pipeline_result['breaking_change_test']['Details'][0]['Details']: + pipeline_result['breaking_change_test']['Details'][0]['Details'].append({ + "Module": "Non Breaking Changes", + "Status": "Succeeded", + "Content": "" + }) + + result_length = len(json.dumps(pipeline_result, indent=4)) + if result_length > 65535: + if only_break: + logger.error("Breaking change report exceeds 65535 characters even with only_break=True.") + return pipeline_result + + logger.info("Regenerating breaking change report with only_break=True to control length within 65535.") + meta_diff(only_break=True) + pipeline_result = get_pipeline_result(only_break=True) + return pipeline_result + + return pipeline_result + + +def sort_by_content(item): + # Sort item by is_break, cmd_name and rule_message, + is_break = 0 if item['is_break'] else 1 + cmd_name = item['cmd_name'] if 'cmd_name' in item else item['subgroup_name'] + return is_break, cmd_name, item['rule_message'] + + +def build_markdown_content(item, content): + if content == "": + content = f'|rule|cmd_name|rule_message|suggest_message|\n|---|---|---|---|\n' + rule_link = f'[{item["rule_id"]} - {item["rule_name"]}]({item["rule_link_url"]})' + rule = f'❌ {rule_link} ' if item['is_break'] else f'⚠️ {rule_link}' + cmd_name = item['cmd_name'] if 'cmd_name' in item else item['subgroup_name'] + rule_message = item['rule_message'] + suggest_message = item['suggest_message'] + content += f'|{rule}|{cmd_name}|{rule_message}|{suggest_message}|\n' + return content + + +def save_pipeline_result(pipeline_result): + # save pipeline result to file + # /mnt/vss/.azdev/env_config/mnt/vss/_work/1/s/env/breaking_change_test.json + filename = os.path.join(azdev_test_result_dir, 
f'breaking_change_test.json') + with open(filename, 'w') as f: + json.dump(pipeline_result, f, indent=4) + logger.info(f"save pipeline result to file: {filename}") + + +def main(): + if pull_request_number != '$(System.PullRequest.PullRequestNumber)': + logger.info("Start breaking change test ...\n") + diff_ref = diff_code(src_branch, 'HEAD') + get_diff_meta_files(diff_ref) + get_base_meta_files(diff_ref) + meta_diff() + pipeline_result = get_pipeline_result() + save_pipeline_result(pipeline_result) + + +if __name__ == '__main__': + main() diff --git a/src/scripts/ci/build_ext_cmd_tree.sh b/src/scripts/ci/build_ext_cmd_tree.sh new file mode 100644 index 00000000000..53bed033b85 --- /dev/null +++ b/src/scripts/ci/build_ext_cmd_tree.sh @@ -0,0 +1,43 @@ +#!/usr/bin/env bash + +changed_content=$(git --no-pager diff --diff-filter=ACMRT HEAD~$AZURE_EXTENSION_COMMIT_NUM -- src/index.json) +if [[ -z "$changed_content" ]]; then + echo "index.json not modified. End task." + exit 0 +fi + +pip install azure-cli-core azure-cli requests +pip install azure-storage-blob==1.5.0 +echo "Listing Available Extensions:" +az extension list-available -otable + +# turn off telemetry as it crowds output +export AZURE_CORE_COLLECT_TELEMETRY=False + +# use index.json in master branch +export AZURE_EXTENSION_INDEX_URL=https://raw.githubusercontent.com/Azure/azure-cli-extensions/master/src/index.json + +output=$(az extension list-available --query [].name -otsv) +# azure-cli-ml is replaced by ml +# disable alias which relies on Jinja2 2.10 +blocklist=("azure-cli-ml" "alias") + +rm -f ~/.azure/extCmdTreeToUpload.json + +filter_exts="" +for ext in $output; do + ext=${ext%$'\r'} # Remove a trailing newline when running on Windows. + if [[ " ${blocklist[@]} " =~ " ${ext} " ]]; then + continue + fi + filter_exts="${filter_exts} ${ext}" + echo "Adding extension:" $ext + az extension add --upgrade -n $ext + if [ $? != 0 ] + then + echo "Failed to load:" $ext + exit 1 + fi +done + +python $(cd $(dirname $0); pwd)/update_ext_cmd_tree.py $filter_exts diff --git a/src/scripts/ci/codegen_cal.py b/src/scripts/ci/codegen_cal.py new file mode 100644 index 00000000000..92129befa6e --- /dev/null +++ b/src/scripts/ci/codegen_cal.py @@ -0,0 +1,96 @@ +#!/usr/bin/env python + +# -------------------------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# -------------------------------------------------------------------------------------------- +# pylint: disable=line-too-long +import os +import re +import argparse +from util import get_repo_root + +output_file = os.environ.get('output_file', None) +result_path = os.environ.get('result_path', None) + +cli_ext_path = get_repo_root() +cli_ext_src_path = os.path.join(cli_ext_path, "src") +print("cli_ext_path: ", cli_ext_path) +print("cli_ext_src_path: ", cli_ext_src_path) + +DEFAULT_SURVEY_MESSAGE = "Thank you for using our CodeGen tool. We value your feedback, and we would like to know how we can improve our product. 
Please take a few minutes to fill out our [codegen survey](https://forms.office.com/r/j6rQuFUqUf?origin=lprLink) "
+
+def check_is_module_aaz_related(mod):
+    codegen_aaz_folder_pattern = re.compile(r"src/%s/azext_.*?/aaz/" % mod)
+    module_path = os.path.join(cli_ext_src_path, mod)
+    print("module_path: ", module_path)
+    for root, subdir, files in os.walk(module_path):
+        codegen_aaz_match = re.findall(codegen_aaz_folder_pattern, root)
+        if codegen_aaz_match:
+            print(codegen_aaz_match)
+            return True
+    return False
+
+def save_comment_pr_survey(comment_pr_survey):
+    print("check comment_pr_survey: ", comment_pr_survey)
+    with open(os.path.join(cli_ext_path, result_path, output_file), "w") as f:
+        f.write(str(comment_pr_survey) + "\n")
+
+def check_aaz_module():
+    comment_pr_survey = 0
+    changed_module_list = os.environ.get('changed_module_list', "").split()
+    for mod in changed_module_list:
+        if check_is_module_aaz_related(mod):
+            comment_pr_survey = 1
+            break
+    save_comment_pr_survey(comment_pr_survey)
+    if comment_pr_survey == 1:
+        comment_message = []
+        add_survey_hint_message(comment_message)
+        save_comment_message(comment_message)
+
+def add_survey_hint_message(comment_message):
+    comment_message.append("## CodeGen Tools Feedback Collection")
+    comment_message.append(DEFAULT_SURVEY_MESSAGE)
+
+def save_comment_message(comment_message):
+    print("comment_message:")
+    print(comment_message)
+    survey_comment_file = os.environ.get('survey_comment_file', "")
+    with open(os.path.join(cli_ext_path, result_path, survey_comment_file), "w") as f:
+        for line in comment_message:
+            f.write(line + "\n")
+
+def save_gh_output(comment_pr_survey):
+    with open(os.environ['GITHUB_OUTPUT'], 'a') as fh:
+        print(f'CommentAAZSurvey={comment_pr_survey}', file=fh)
+
+def set_aaz_comment():
+    if not os.path.exists(os.path.join(cli_ext_path, result_path, output_file)):
+        print("error in file download")
+        return
+    comment_pr_survey = 0
+    with open(os.path.join(cli_ext_path, result_path, output_file), "r") as f:
+        for line in f:
+            comment_pr_survey = int(line.strip())
+    print("comment_pr_survey: ", comment_pr_survey)
+    save_gh_output(comment_pr_survey)
+    if comment_pr_survey:
+        comment_message = []
+        add_survey_hint_message(comment_message)
+        save_comment_message(comment_message)
+
+def main(job):
+    if job == "check":
+        check_aaz_module()
+    elif job == "set":
+        set_aaz_comment()
+    else:
+        print("unsupported job type")
+
+if __name__ == '__main__':
+    parser = argparse.ArgumentParser()
+    parser.add_argument("--job", choices=["check", "set"], required=True, help="job type")
+    args = parser.parse_args()
+    print(vars(args))
+    main(args.job)
\ No newline at end of file
diff --git a/src/scripts/ci/credscan/CredScanSuppressions.json b/src/scripts/ci/credscan/CredScanSuppressions.json
new file mode 100644
index 00000000000..3b7a8439c27
--- /dev/null
+++ b/src/scripts/ci/credscan/CredScanSuppressions.json
@@ -0,0 +1,299 @@
+{
+    "tool": "Credential Scanner",
+    "suppressions": [
+        {
+            "placeholder": "ManangementGroupServicePrincipal%40123",
+            "_justification": "[ManagementGroups] hard code dummy password"
+        },
+        {
+            "file": [
+                "src\\eventgrid\\azext_eventgrid\\tests\\latest\\recordings\\test_create_domain.yaml",
+                "src\\eventgrid\\azext_eventgrid\\tests\\latest\\recordings\\test_create_topic.yaml"
+            ],
+            "_justification": "false alarm about 'Found General Symmetric Key'"
+        },
+        {
+            "file": [
+                "src\\mixed-reality\\azext_mixed_reality\\tests\\latest\\recordings\\test_spatial_anchors_account_scenario.yaml"
+            ],
+            "_justification": "[MixedReality] Found Azure Shared Access Key / Web Hook Token"
+        },
+        {
+            "placeholder": "aduser",
+            "_justification": "[NetAppFiles] Add suppression for false alarm in comments of _help.py"
+        },
+        {
+            "placeholder": "AZURE_CLIENT_SECRET",
+            "_justification": "[db_up] false alarm about environment variable name"
+        },
+        {
+            "placeholder": "ADPassword",
+            "_justification": "[SQL] false alarm about AuthenticationType enum value"
+        },
+        {
+            "placeholder": "ActiveDirectoryPassword",
+            "_justification": "[DataMigration] false alarm about AuthenticationType enum value"
+        },
+        {
+            "placeholder": "Ovg+o0K/0/2V8upg7AwlyAPCriEcOSXKuBu2Gv/PU70Y7aWDW3C2ZRmw6kYWqPWBaM1GosLkcSZkgsobAlT+Sw==",
+            "_justification": "[ADLS] false alarm on sign value"
+        },
+        {
+            "placeholder": "4CTlhouPm0c3PWuTQ8t6Myh/FYegVUPqXUmdtL2byRytFPlt98L/pw==",
+            "_justification": "verification code in test_eventgrid_commands.py"
+        },
+        {
+            "placeholder": "7jTiaEBVeYjC8X6gPDUhIhAnFRjaxZaGyS3hBbr09bmj3heQNhvrbA==",
+            "_justification": "verification code in test_eventgrid_commands.py"
+        },
+        {
+            "placeholder": "Password123!",
+            "_justification": "dummy password in test_synapse_scenario.py"
+        },
+        {
+            "file": [
+                "src\\timeseriesinsights\\azext_timeseriesinsights\\tests\\latest\\recordings\\test_timeseriesinsights_environment_longterm.yaml",
+                "src\\timeseriesinsights\\azext_timeseriesinsights\\tests\\latest\\recordings\\test_timeseriesinsights_event_source_eventhub.yaml",
+                "src\\timeseriesinsights\\azext_timeseriesinsights\\tests\\latest\\recordings\\test_timeseriesinsights_event_source_iothub.yaml"
+            ],
+            "_justification": "Azure storage access key"
+        },
+        {
+            "file": [
+                "src\\maintenance\\azext_maintenance\\tests\\latest\\recordings\\test_maintenance_commands.yaml",
+                "src\\maintenance\\azext_maintenance\\tests\\latest\\recordings\\test_maintenance_configuration_create.yaml",
+                "src\\maintenance\\azext_maintenance\\tests\\latest\\recordings\\test_signalr_commands.yaml",
+                "src\\notification-hub\\azext_notification_hub\\tests\\latest\\recordings\\test_notificationhubs.yaml"
+            ],
+            "_justification": "Azure Shared Access Key / Web Hook Token"
+        },
+        {
+            "file": [
+                "src\\eventgrid\\azext_eventgrid\\tests\\latest\\recordings\\test_Partner_scenarios.yaml"
+            ],
+            "_justification": "Found General Symmetric Key"
+        },
+        {
+            "file": [
+                "src\\datafactory\\azext_datafactory\\vendored_sdks\\datafactory\\models\\_data_factory_management_client_enums.py",
+                "src\\datafactory\\azext_datafactory\\vendored_sdks\\datafactory\\models\\_models_py3.py"
+            ],
+            "_justification": "Dummy secrets for one-off resources"
+        },
+        {
+            "file": [
+                "src\\communication\\azext_communication\\tests\\latest\\recordings\\test_communication_scenario.yaml"
+            ],
+            "_justification": "Dummy resources' tokens left during testing Microsoft.Communication"
+        },
+        {
+            "file": [
+                "src\\application-insights\\azext_applicationinsights\\tests\\latest\\recordings\\test_api_key.yaml"
+            ],
+            "_justification": "random keys for one-off resources"
+        },
+        {
+            "file": [
+                "src\\stream-analytics\\azext_stream_analytics\\_help.py",
+                "src\\appservice-kube\\azext_appservice_kube\\_help.py"
+            ],
+            "_justification": "dummy passwords for one-off resources"
+        },
+        {
+            "file": [
+                "src\\storage-blob-preview\\azext_storage_blob_preview\\tests\\latest\\recordings\\test_storage_blob_incremental_copy.yaml"
+            ],
+            "_justification": "[Storage] response body contains a random value recognized as a secret in outdated recording files of storage; may be removed in the future"
+        },
+        {
+            "file": [
"src\\image-gallery\\azext_image_gallery\\vendored_sdks\\azure_mgmt_compute\\models\\_models.py", + "src\\image-gallery\\azext_image_gallery\\vendored_sdks\\azure_mgmt_compute\\models\\_models_py3.py" + ], + "_justification": "Python SDK uses the example of fake password to indicate the format" + }, + { + "file": [ + "src\\aks-preview\\azext_aks_preview\\tests\\latest\\data\\setup_proxy.sh" + ], + "_justification": "Dummy self-signed certificate + private key used for testing only." + }, + { + "file": [ + "src\\containerapp\\azext_containerapp\\tests\\latest\\recordings\\test_container_acr.yaml", + "src\\containerapp\\azext_containerapp\\tests\\latest\\recordings\\test_containerapp_e2e.yaml", + "src\\containerapp\\azext_containerapp\\tests\\latest\\recordings\\test_containerapp_env_dapr_components.yaml", + "src\\containerapp\\azext_containerapp\\tests\\latest\\recordings\\test_containerapp_env_e2e.yaml", + "src\\containerapp\\azext_containerapp\\tests\\latest\\recordings\\test_containerapp_env_storage.yaml", + "src\\containerapp\\azext_containerapp\\tests\\latest\\recordings\\test_containerapp_identity_system.yaml", + "src\\containerapp\\azext_containerapp\\tests\\latest\\recordings\\test_containerapp_ingress_e2e.yaml", + "src\\containerapp\\azext_containerapp\\tests\\latest\\recordings\\test_containerapp_ingress_traffic_e2e.yaml", + "src\\containerapp\\azext_containerapp\\tests\\latest\\recordings\\test_containerapp_logstream.yaml", + "src\\containerapp\\azext_containerapp\\tests\\latest\\recordings\\test_containerapp_update.yaml", + "src\\containerapp\\azext_containerapp\\tests\\latest\\recordings\\test_containerapp_dapr_e2e.yaml", + "src\\containerapp\\azext_containerapp\\tests\\latest\\recordings\\test_containerapp_up_image_e2e.yaml", + "src\\containerapp\\azext_containerapp\\tests\\latest\\recordings\\test_containerapp_custom_domains_e2e.yaml", + "src\\containerapp\\azext_containerapp\\tests\\latest\\recordings\\test_containerapp_revision_label_e2e.yaml", + "src\\containerapp\\azext_containerapp\\tests\\latest\\cert.pfx", + "src\\containerapp\\azext_containerapp\\tests\\latest\\test_containerapp_commands.py", + "src\\containerapp\\azext_containerapp\\tests\\latest\\test_containerapp_env_commands.py", + "src\\containerapp\\azext_containerapp\\tests\\latest\\recordings\\test_containerapp_registry_msi.yaml", + "src\\containerapp\\azext_containerapp\\tests\\latest\\recordings\\test_containerapp_update_containers.yaml", + "src\\containerapp\\azext_containerapp\\tests\\latest\\recordings\\test_containerapp_anonymous_registry.yaml", + "src\\containerapp\\azext_containerapp\\tests\\latest\\recordings\\test_containerapp_identity_user.yaml", + "src\\containerapp\\azext_containerapp\\tests\\latest\\recordings\\test_containerapp_registry_identity_user.yaml", + "src\\containerapp\\azext_containerapp\\tests\\latest\\recordings\\test_containerapp_identity_e2e.yaml", + "src\\containerapp\\azext_containerapp\\tests\\latest\recordings\\test_containerapp_scale_create.yaml", + "src\\containerapp\\azext_containerapp\\tests\\latest\\recordings\\test_containerapp_compose_create_basic_no_existing_resources.yaml", + "src\\containerapp\\azext_containerapp\\tests\\latest\\recordings\\test_containerapp_compose_create_with_environment.yaml", + "src\\containerapp\\azext_containerapp\\tests\\latest\\recordings\\test_containerapp_compose_create_with_environment_prompt.yaml", + "src\\containerapp\\azext_containerapp\\tests\\latest\\recordings\\test_containerapp_compose_create_with_ingress_both.yaml", + 
"src\\containerapp\\azext_containerapp\\tests\\latest\\recordings\\test_containerapp_compose_create_with_ingress_external.yaml", + "src\\containerapp\\azext_containerapp\\tests\\latest\\recordings\\test_containerapp_compose_create_with_ingress_internal.yaml", + "src\\containerapp\\azext_containerapp\\tests\\latest\\recordings\\test_containerapp_compose_create_with_ingress_prompt.yaml", + "src\\containerapp\\azext_containerapp\\tests\\latest\\recordings\\test_containerapp_compose_create_with_registry_all_args.yaml", + "src\\containerapp\\azext_containerapp\\tests\\latest\\recordings\\test_containerapp_compose_create_with_registry_server_arg_only.yaml", + "src\\containerapp\\azext_containerapp\\tests\\latest\\recordings\\test_containerapp_compose_create_with_replicas_global_scale.yaml", + "src\\containerapp\\azext_containerapp\\tests\\latest\\recordings\\test_containerapp_compose_create_with_replicas_replicated_mode.yaml", + "src\\containerapp\\azext_containerapp\\tests\\latest\\recordings\\test_containerapp_compose_create_with_resources_from_both_cpus_and_deploy_cpu.yaml", + "src\\containerapp\\azext_containerapp\\tests\\latest\\recordings\\test_containerapp_compose_create_with_resources_from_deploy_cpu.yaml", + "src\\containerapp\\azext_containerapp\\tests\\latest\\recordings\\test_containerapp_compose_create_with_resources_from_service_cpus.yaml", + "src\\containerapp\\azext_containerapp\\tests\\latest\\recordings\\test_containerapp_compose_create_with_secrets.yaml", + "src\\containerapp\\azext_containerapp\\tests\\latest\\recordings\\test_containerapp_compose_create_with_secrets_and_existing_environment.yaml", + "src\\containerapp\\azext_containerapp\\tests\\latest\\recordings\\test_containerapp_compose_create_with_secrets_and_existing_environment_conflict.yaml", + "src\\containerapp\\azext_containerapp\\tests\\latest\\recordings\\test_containerapp_compose_create_with_transport_arg.yaml", + "src\\containerapp\\azext_containerapp\\tests\\latest\\recordings\\test_containerapp_compose_with_command_list.yaml", + "src\\containerapp\\azext_containerapp\\tests\\latest\\recordings\\test_containerapp_compose_with_command_list_and_entrypoint.yaml", + "src\\containerapp\\azext_containerapp\\tests\\latest\\recordings\\test_containerapp_compose_with_command_string.yaml", + "src\\containerapp\\azext_containerapp\\tests\\latest\\recordings\\test_containerapp_compose_create_with_transport_mapping_arg.yaml", + "src\\containerapp\\azext_containerapp\\tests\\latest\\recordings\\test_containerapp_session_code_interpreter_nodelts_registry_e2e.yaml" + ], + "_justification": "Dummy resources' keys left during testing Microsoft.App (required for log-analytics to create managedEnvironments)" + }, + { + "file":[ + "src\\diskpool\\README.md", + "src\\datamigration\\README.md" + ], + "_justification": "README file example password" + }, + { + "file":[ + "src\\aks-preview\\azext_aks_preview\\_help.py" + ], + "_justification": "False positive detection, reported credentital not found." 
+ }, + { + "file":[ + "src\\containerapp\\azext_containerapp\\tests\\latest\\test_containerapp_connected_env_scenario.py", + "src\\cosmosdb-preview\\azext_cosmosdb_preview\\tests\\latest\\test_cosmosdb_mongocluster_scenario.py", + "src\\devcenter\\azext_devcenter\\tests\\latest\\helper.py", + "src\\devcenter\\azext_devcenter\\tests\\latest\\test_devcenter_scenario.py", + "src\\image-gallery\\azext_image_gallery\\tests\\latest\\test_image_gallery.py", + "src\\scvmm\\azext_scvmm\\tests\\latest\\test_scvmm_scenario.py", + "src\\vm-repair\\azext_vm_repair\\tests\\latest\\test_repair_commands.py" + ], + "_justification": "Fake password for testing." + }, + { + "file":[ + "src\\kusto\\azext_kusto\\tests\\latest\\example_steps.py" + ], + "_justification": "Fake token for testing." + }, + { + "file":[ + "src\\palo-alto-networks\\azext_palo_alto_networks\\tests\\latest\\test_palo_alto_networks.py", + "src\\workloads\\azext_workloads\\tests\\latest\\create_infra_distributed_non_ha_config.json", + "src\\workloads\\azext_workloads\\tests\\latest\\InstallPayload.json" + ], + "_justification": "Fake key for testing." + }, + { + "file":[ + "src\\aks-preview\\azext_aks_preview\\tests\\latest\\recordings\\test_aks_update_with_windows_password.yaml", + "src\\application-insights\\azext_applicationinsights\\tests\\latest\\recordings\\test_connect_webapp.yaml", + "src\\application-insights\\azext_applicationinsights\\tests\\latest\\recordings\\test_connect_webapp_cross_resource_group.yaml", + "src\\appservice-kube\\azext_appservice_kube\\tests\\latest\\recordings\\test_linux_webapp_quick_create_kube.yaml", + "src\\appservice-kube\\azext_appservice_kube\\tests\\latest\\recordings\\test_webapp_elastic_scale_min_elastic_instance_count_kube.yaml", + "src\\appservice-kube\\azext_appservice_kube\\tests\\latest\\recordings\\test_webapp_elastic_scale_prewarmed_instance_count_kube.yaml", + "src\\appservice-kube\\azext_appservice_kube\\tests\\latest\\recordings\\test_win_webapp_quick_create_runtime_kube.yaml", + "src\\authV2\\azext_authV2\\tests\\latest\\recordings\\test_authV2_auth.yaml", + "src\\authV2\\azext_authV2\\tests\\latest\\recordings\\test_authV2_authclassic.yaml", + "src\\containerapp\\azext_containerapp\\tests\\latest\\recordings\\test_containerappjob_create_with_environment_id.yaml", + "src\\containerapp\\azext_containerapp\\tests\\latest\\recordings\\test_containerappjob_create_with_yaml.yaml", + "src\\containerapp\\azext_containerapp\\tests\\latest\\recordings\\test_containerapp_compose_create_environment_to_target_location.yaml", + "src\\containerapp\\azext_containerapp\\tests\\latest\\recordings\\test_containerapp_create_and_update_with_env_vars_e2e.yaml", + "src\\containerapp\\azext_containerapp\\tests\\latest\\recordings\\test_containerapp_create_with_vnet_yaml.yaml", + "src\\containerapp\\azext_containerapp\\tests\\latest\\recordings\\test_containerapp_dev_add_on_binding_e2e.yaml", + "src\\containerapp\\azext_containerapp\\tests\\latest\\recordings\\test_containerapp_env_certificate_e2e.yaml", + "src\\containerapp\\azext_containerapp\\tests\\latest\\recordings\\test_containerapp_env_certificate_upload_with_certificate_name.yaml", + "src\\containerapp\\azext_containerapp\\tests\\latest\\recordings\\test_containerapp_env_custom_domains.yaml", + "src\\containerapp\\azext_containerapp\\tests\\latest\\recordings\\test_containerapp_env_internal_only_e2e.yaml", + "src\\containerapp\\azext_containerapp\\tests\\latest\\recordings\\test_containerapp_env_la_dynamic_json.yaml", + 
"src\\containerapp\\azext_containerapp\\tests\\latest\\recordings\\test_containerapp_env_logs_e2e.yaml", + "src\\containerapp\\azext_containerapp\\tests\\latest\\recordings\\test_containerapp_env_msi_certificate.yaml", + "src\\containerapp\\azext_containerapp\\tests\\latest\\recordings\\test_containerapp_env_msi_custom_domains.yaml", + "src\\containerapp\\azext_containerapp\\tests\\latest\\recordings\\test_containerapp_env_mtls.yaml", + "src\\containerapp\\azext_containerapp\\tests\\latest\\recordings\\test_containerapp_env_p2p_traffic_encryption.yaml", + "src\\containerapp\\azext_containerapp\\tests\\latest\\recordings\\test_containerapp_env_update_custom_domains.yaml", + "src\\containerapp\\azext_containerapp\\tests\\latest\\recordings\\test_containerapp_env_usages.yaml", + "src\\containerapp\\azext_containerapp\\tests\\latest\\recordings\\test_containerapp_get_customdomainverificationid_e2e.yaml", + "src\\containerapp\\azext_containerapp\\tests\\latest\\recordings\\test_containerapp_java_component.yaml", + "src\\containerapp\\azext_containerapp\\tests\\latest\\recordings\\test_containerapp_java_component_deprecated.yaml", + "src\\containerapp\\azext_containerapp\\tests\\latest\\recordings\\test_containerapp_managed_service_binding_e2e.yaml", + "src\\containerapp\\azext_containerapp\\tests\\latest\\recordings\\test_containerapp_patch_list_and_apply_with_node18_e2e.yaml", + "src\\containerapp\\azext_containerapp\\tests\\latest\\recordings\\test_containerapp_patch_list_and_apply_with_python310_e2e.yaml", + "src\\containerapp\\azext_containerapp\\tests\\latest\\recordings\\test_containerapp_patch_list_and_apply_with_show_all_e2e.yaml", + "src\\containerapp\\azext_containerapp\\tests\\latest\\recordings\\test_containerapp_preview_connected_env_certificate.yaml", + "src\\containerapp\\azext_containerapp\\tests\\latest\\recordings\\test_containerapp_preview_connected_env_certificate_upload_with_certificate_name.yaml", + "src\\containerapp\\azext_containerapp\\tests\\latest\\recordings\\test_containerapp_preview_create_with_environment_id.yaml", + "src\\containerapp\\azext_containerapp\\tests\\latest\\recordings\\test_containerapp_resiliency.yaml", + "src\\containerapp\\azext_containerapp\\tests\\latest\\recordings\\test_containerapp_sessionpool.yaml", + "src\\containerapp\\azext_containerapp\\tests\\latest\\recordings\\test_containerapp_sessionpool_registry.yaml", + "src\\containerapp\\azext_containerapp\\tests\\latest\\recordings\\test_containerapp_session_code_interpreter_e2e.yaml", + "src\\containerapp\\azext_containerapp\\tests\\latest\\recordings\\test_containerapp_session_code_interpreter_nodelts_e2e.yaml", + "src\\containerapp\\azext_containerapp\\tests\\latest\\recordings\\test_containerapp_up_mooncake.yaml", + "src\\containerapp\\azext_containerapp\\tests\\latest\\recordings\\test_containerapp_up_source_with_default_registry_image.yaml", + "src\\containerapp\\azext_containerapp\\tests\\latest\\recordings\\test_container_app_mount_azurefile_e2e.yaml", + "src\\containerapp\\azext_containerapp\\tests\\latest\\recordings\\test_container_app_mount_nfsazurefile_e2e.yaml", + "src\\containerapp\\azext_containerapp\\tests\\latest\\recordings\\test_container_app_mount_secret_e2e.yaml", + "src\\containerapp\\azext_containerapp\\tests\\latest\\recordings\\test_container_app_mount_secret_update_e2e.yaml", + "src\\containerapp\\azext_containerapp\\tests\\latest\\recordings\\test_dapr_component_resiliency.yaml", + 
"src\\containerapp\\azext_containerapp\\tests\\latest\\recordings\\test_containerapp_up_source_with_dockerfile_e2e.yaml", + "src\\containerapp\\azext_containerapp\\tests\\latest\\recordings\\test_containerapp_up_source_with_multiple_environments_e2e.yaml", + "src\\cosmosdb-preview\\azext_cosmosdb_preview\\tests\\latest\\recordings\\test_cosmosdb_collection.yaml", + "src\\cosmosdb-preview\\azext_cosmosdb_preview\\tests\\latest\\recordings\\test_cosmosdb_database.yaml", + "src\\cosmosdb-preview\\azext_cosmosdb_preview\\tests\\latest\\recordings\\test_cosmosdb_mongocluster_crud.yaml", + "src\\cosmosdb-preview\\azext_cosmosdb_preview\\tests\\latest\\recordings\\test_cosmosdb_mongocluster_firewall.yaml", + "src\\elastic\\azext_elastic\\tests\\latest\\recordings\\test_elastic_monitor.yaml", + "src\\image-gallery\\azext_image_gallery\\tests\\latest\\recordings\\test_community_gallery_operations.yaml", + "src\\image-gallery\\azext_image_gallery\\tests\\latest\\recordings\\test_create_vm_with_community_gallery_image.yaml", + "src\\image-gallery\\azext_image_gallery\\tests\\latest\\recordings\\test_shared_gallery_community.yaml", + "src\\kusto\\azext_kusto\\tests\\latest\\recordings\\test_kusto_Scenario.yaml", + "src\\palo-alto-networks\\azext_palo_alto_networks\\tests\\latest\\recordings\\test_palo_alto_firewall_v2.yaml", + "src\\purview\\azext_purview\\tests\\latest\\recordings\\test_purview_account.yaml", + "src\\quantum\\azext_quantum\\tests\\latest\\recordings\\test_workspace_keys.yaml", + "src\\qumulo\\azext_qumulo\\tests\\latest\\recordings\\test_file_system.yaml", + "src\\rdbms-connect\\azext_rdbms_connect\\tests\\latest\\recordings\\test_mysql_flexible_server_connect.yaml", + "src\\rdbms-connect\\azext_rdbms_connect\\tests\\latest\\recordings\\test_postgres_flexible_server_connect.yaml", + "src\\redisenterprise\\azext_redisenterprise\\tests\\latest\\recordings\\test_redisenterprise_scenario1.yaml", + "src\\redisenterprise\\azext_redisenterprise\\tests\\latest\\recordings\\test_redisenterprise_scenario2.yaml", + "src\\scvmm\\azext_scvmm\\tests\\latest\\recordings\\test_scvmm.yaml", + "src\\spring\\azext_spring\\tests\\latest\\recordings\\test_api_portal.yaml", + "src\\spring\\azext_spring\\tests\\latest\\recordings\\test_app_crud.yaml", + "src\\spring\\azext_spring\\tests\\latest\\recordings\\test_app_crud_1.yaml", + "src\\spring\\azext_spring\\tests\\latest\\recordings\\test_app_deploy_container.yaml", + "src\\spring\\azext_spring\\tests\\latest\\recordings\\test_app_deploy_container_command.yaml", + "src\\spring\\azext_spring\\tests\\latest\\recordings\\test_blue_green_deployment.yaml", + "src\\staticwebapp\\azext_staticwebapp\\tests\\latest\\recordings\\test_staticwebapp_dbconnection_cosmosdb.yaml", + "src\\vmware\\azext_vmware\\tests\\latest\\recordings\\test_vmware_global_reach_connection.yaml", + "src\\workloads\\azext_workloads\\tests\\latest\\recordings\\test_workloads_svi.yaml", + "src\\workloads\\azext_workloads\\tests\\latest\\recordings\\test_workloads_svi_install.yaml", + "src\\oracle-database\\azext_oracle_database\\tests\\latest\\recordings\\test_oracledatabase_adbs.yaml", + "src\\storage-preview\\azext_storage_preview\\tests\\latest\\recordings\\test_storage_account_local_user.yaml" + ], + "_justification": "Fake credentials for recordings reported by new version credential scanner." 
+ } + ] +} diff --git a/src/scripts/ci/index_ref_doc.py b/src/scripts/ci/index_ref_doc.py new file mode 100644 index 00000000000..63589122d34 --- /dev/null +++ b/src/scripts/ci/index_ref_doc.py @@ -0,0 +1,84 @@ +#!/usr/bin/env python + +# -------------------------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# -------------------------------------------------------------------------------------------- + +from __future__ import print_function + +import os +import sys +import tempfile +import traceback +import unittest +import shutil +from subprocess import check_call, CalledProcessError +from pkg_resources import parse_version, get_distribution + +from six import with_metaclass + +from util import get_index_data, get_whl_from_url, get_repo_root + + +REF_GEN_SCRIPT = os.path.join(get_repo_root(), 'scripts', 'refdoc', 'generate.py') + +REF_DOC_OUT_DIR = os.environ.get('AZ_EXT_REF_DOC_OUT_DIR', tempfile.mkdtemp()) + +if not os.path.isdir(REF_DOC_OUT_DIR): + print('{} is not a directory'.format(REF_DOC_OUT_DIR)) + sys.exit(1) + +ALL_TESTS = [] + +CLI_VERSION = get_distribution('azure-cli').version + +for extension_name, exts in get_index_data()['extensions'].items(): + parsed_cli_version = parse_version(CLI_VERSION) + filtered_exts = [] + for ext in exts: + if parsed_cli_version <= parse_version(ext['metadata'].get('azext.maxCliCoreVersion', CLI_VERSION)): + filtered_exts.append(ext) + if not filtered_exts: + continue + + candidates_sorted = sorted(filtered_exts, key=lambda c: parse_version(c['metadata']['version']), reverse=True) + chosen = candidates_sorted[0] + ALL_TESTS.append((extension_name, chosen['downloadUrl'], chosen['filename'])) + + +class TestIndexRefDocsMeta(type): + def __new__(mcs, name, bases, _dict): + + def gen_test(ext_name, ext_url, filename): + def test(self): + ext_file = get_whl_from_url(ext_url, filename, self.whl_dir) + ref_doc_out_dir = os.path.join(REF_DOC_OUT_DIR, ext_name) + if not os.path.isdir(ref_doc_out_dir): + os.mkdir(ref_doc_out_dir) + script_args = [sys.executable, REF_GEN_SCRIPT, '--extension-file', ext_file, '--output-dir', + ref_doc_out_dir] + try: + check_call(script_args) + except CalledProcessError as e: + traceback.print_exc() + raise e + return test + + for ext_name, ext_url, filename in ALL_TESTS: + test_name = "test_ref_doc_%s" % ext_name + _dict[test_name] = gen_test(ext_name, ext_url, filename) + return type.__new__(mcs, name, bases, _dict) + + +class IndexRefDocs(with_metaclass(TestIndexRefDocsMeta, unittest.TestCase)): + + def setUp(self): + self.whl_dir = tempfile.mkdtemp() + + def tearDown(self): + shutil.rmtree(self.whl_dir) + + +if __name__ == '__main__': + unittest.main() diff --git a/src/scripts/ci/release_version_cal.py b/src/scripts/ci/release_version_cal.py new file mode 100644 index 00000000000..da0047c1343 --- /dev/null +++ b/src/scripts/ci/release_version_cal.py @@ -0,0 +1,378 @@ +#!/usr/bin/env python + +# -------------------------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. 
+# -------------------------------------------------------------------------------------------- +# pylint: disable=line-too-long +import os +import re +import json +import subprocess +from packaging.version import parse + +from azdev.utilities.path import get_cli_repo_path, get_ext_repo_paths +from azdev.operations.extensions import cal_next_version +from azdev.operations.constant import (PREVIEW_INIT_SUFFIX, VERSION_MAJOR_TAG, VERSION_MINOR_TAG, + VERSION_PATCH_TAG, VERSION_STABLE_TAG, VERSION_PREVIEW_TAG, VERSION_PRE_TAG) +from util import get_index_data + +base_meta_path = os.environ.get('base_meta_path', None) +diff_meta_path = os.environ.get('diff_meta_path', None) +result_path = os.environ.get('result_path', None) +output_file = os.environ.get('output_file', None) +add_labels_file = os.environ.get('add_labels_file', None) +remove_labels_file = os.environ.get('remove_labels_file', None) +pr_user = os.environ.get('pr_user', "") + +changed_module_list = os.environ.get('changed_module_list', "").split() +diff_code_file = os.environ.get('diff_code_file', "") +print("diff_code_file:", diff_code_file) +pr_label_list = os.environ.get('pr_label_list', "") +pr_label_list = [name.lower().strip().strip('"').strip("'") for name in json.loads(pr_label_list)] + +DEFAULT_VERSION = "0.0.0" +INIT_RELEASE_VERSION = "1.0.0b1" +DEFAULT_MESSAGE = " - For more info about extension versioning, please refer to [Extension version schema](https://github.com/Azure/azure-cli/blob/release/doc/extensions/versioning_guidelines.md)" +block_pr = 0 + +cli_ext_path = get_ext_repo_paths()[0] +print("get_cli_repo_path: ", get_cli_repo_path()) +print("get_ext_repo_paths: ", cli_ext_path) + + +def extract_module_history_update_info(mod_update_info, mod): + """ + re pattern: + --- a/src/monitor-control-service/HISTORY.(rst|md) + +++ b/src/monitor-control-service/HISTORY.(rst|md) + """ + mod_update_info["history_updated"] = False + module_history_update_pattern = re.compile(r"\+\+\+.*?src/%s/HISTORY\.(rst|md)" % mod) + with open(diff_code_file, "r") as f: + for line in f: + mod_history_update_match = re.findall(module_history_update_pattern, line) + if mod_history_update_match: + mod_update_info["history_updated"] = True + + +def extract_module_version_update_info(mod_update_info, mod): + """ + re pattern: + --- a/src/monitor-control-service/setup.py + +++ b/src/monitor-control-service/setup.py + -VERSION = '1.0.1' + +VERSION = '1.1.1' + --- a/src/monitor-control-service/HISTORY.RST + py files exclude tests, vendored_sdks and aaz folder + """ + diff_file_started = False + module_setup_update_pattern = re.compile(r"\+\+\+.*?src/%s/(?!.*(?:tests|vendored_sdks|aaz)/).*?.py" % mod) + module_version_update_pattern = re.compile(r"\+\s?VERSION\s?\=\s?[\'\"]([0-9\.b]+)[\'\"]") + with open(diff_code_file, "r") as f: + for line in f: + if diff_file_started: + if mod_update_info.get("version_diff", None): + break + if line.find("diff") == 0: + diff_file_started = False + continue + mod_version_update_match = re.findall(module_version_update_pattern, line) + if mod_version_update_match and len(mod_version_update_match) == 1: + mod_update_info["version_diff"] = mod_version_update_match[0] + else: + mod_setup_update_match = re.findall(module_setup_update_pattern, line) + if mod_setup_update_match: + diff_file_started = True + + +def extract_module_metadata_update_info(mod_update_info, mod): + """ + re pattern: + --- a/src/monitor-control-service/azext_amcs/azext_metadata.json + +++ 
b/src/monitor-control-service/azext_amcs/azext_metadata.json + - "azext.isPreview": true + + "azext.isPreview": true + --- a/src/monitor-control-service/HISTORY.RST + """ + mod_update_info["meta_updated"] = False + module_meta_update_pattern = re.compile(r"\+\+\+.*?src/%s/azext_.*?/azext_metadata.json" % mod) + module_ispreview_add_pattern = re.compile(r"\+.*?azext.isPreview.*?true") + module_ispreview_remove_pattern = re.compile(r"\-.*?azext.isPreview.*?true") + module_isexp_add_pattern = re.compile(r"\+.*?azext.isExperimental.*?true") + module_isexp_remove_pattern = re.compile(r"\-.*?azext.isExperimental.*?true") + with open(diff_code_file, "r") as f: + for line in f: + if mod_update_info["meta_updated"]: + if line.find("---") == 0: + break + ispreview_add_match = re.findall(module_ispreview_add_pattern, line) + if ispreview_add_match and len(ispreview_add_match): + mod_update_info["preview_tag_diff"] = "add" + ispreview_remove_match = re.findall(module_ispreview_remove_pattern, line) + if ispreview_remove_match and len(ispreview_remove_match): + mod_update_info["preview_tag_diff"] = "remove" + isexp_add_match = re.findall(module_isexp_add_pattern, line) + if isexp_add_match and len(isexp_add_match): + mod_update_info["exp_tag_diff"] = "add" + isexp_remove_match = re.findall(module_isexp_remove_pattern, line) + if isexp_remove_match and len(isexp_remove_match): + mod_update_info["exp_tag_diff"] = "remove" + else: + module_meta_update_match = re.findall(module_meta_update_pattern, line) + if module_meta_update_match: + mod_update_info["meta_updated"] = True + + +def find_module_metadata_of_latest_version(mod): + cmd = ["azdev", "extension", "show", "--mod-name", mod, "--query", "pkg_name", "-o", "tsv"] + result = subprocess.run(cmd, stdout=subprocess.PIPE) + if result.returncode == 0: + mod = result.stdout.decode("utf8").strip() + return get_module_metadata_of_max_version(mod) + + +def extract_module_version_info(mod_update_info, mod): + next_version_pre_tag = get_next_version_pre_tag() + next_version_segment_tag = get_next_version_segment_tag() + print("next_version_pre_tag: ", next_version_pre_tag) + print("next_version_segment_tag: ", next_version_segment_tag) + base_meta_file = os.path.join(cli_ext_path, base_meta_path, "az_" + mod + "_meta.json") + diff_meta_file = os.path.join(cli_ext_path, diff_meta_path, "az_" + mod + "_meta.json") + if not os.path.exists(base_meta_file) and not os.path.exists(diff_meta_file): + print("no base and diff meta file found for {0}".format(mod)) + return + elif not os.path.exists(base_meta_file) and os.path.exists(diff_meta_file): + print("no base meta file found for {0}".format(mod)) + mod_update_info.update({"version": INIT_RELEASE_VERSION, "preview_tag": "add"}) + return + elif not os.path.exists(diff_meta_file): + print("no diff meta file found for {0}".format(mod)) + return + pre_release = find_module_metadata_of_latest_version(mod) + if pre_release is None: + next_version = cal_next_version(base_meta_file=base_meta_file, diff_meta_file=diff_meta_file, + current_version=DEFAULT_VERSION, + next_version_pre_tag=next_version_pre_tag, + next_version_segment_tag=next_version_segment_tag) + else: + next_version = cal_next_version(base_meta_file=base_meta_file, diff_meta_file=diff_meta_file, + current_version=pre_release['metadata']['version'], + is_preview=pre_release['metadata'].get("azext.isPreview", None), + is_experimental=pre_release['metadata'].get("azext.isExperimental", None), + next_version_pre_tag=next_version_pre_tag, + 
next_version_segment_tag=next_version_segment_tag) + mod_update_info.update(next_version) + + +def fill_module_update_info(mods_update_info): + for mod in changed_module_list: + update_info = {} + extract_module_history_update_info(update_info, mod) + extract_module_version_update_info(update_info, mod) + extract_module_metadata_update_info(update_info, mod) + extract_module_version_info(update_info, mod) + mods_update_info[mod] = update_info + print("mods_update_info") + print(mods_update_info) + + +def get_module_metadata_of_max_version(mod): + if mod not in get_index_data()['extensions']: + print("No previous release for {0}".format(mod)) + return None + pre_releases = get_index_data()['extensions'][mod] + candidates_sorted = sorted(pre_releases, key=lambda c: parse(c['metadata']['version']), reverse=True) + chosen = candidates_sorted[0] + return chosen + + +def get_next_version_pre_tag(): + if VERSION_STABLE_TAG in pr_label_list: + return VERSION_STABLE_TAG + elif VERSION_PREVIEW_TAG in pr_label_list: + return VERSION_PREVIEW_TAG + else: + return None + + +def get_next_version_segment_tag(): + """ + manual label order: + major > minor > patch > pre + """ + if VERSION_MAJOR_TAG in pr_label_list: + return VERSION_MAJOR_TAG + elif VERSION_MINOR_TAG in pr_label_list: + return VERSION_MINOR_TAG + elif VERSION_PATCH_TAG in pr_label_list: + return VERSION_PATCH_TAG + elif VERSION_PRE_TAG in pr_label_list: + return VERSION_PRE_TAG + else: + return None + + +def add_suggest_header(comment_message): + if block_pr == 1: + comment_message.insert(0, "## :warning: Release Requirements") + else: + comment_message.insert(0, "## Release Suggestions") + comment_message.insert(0, "Hi @" + pr_user) + + +def gen_history_comment_message(mod, mod_update_info, mod_message): + if not mod_update_info["history_updated"]: + mod_message.append(" - Please log updates in `src/{0}/HISTORY.rst`".format(mod)) + + +def gen_version_comment_message(mod, mod_update_info, mod_message): + global block_pr + if not mod_update_info.get("version_diff", None): + if mod_update_info.get("version", None): + mod_message.append(" - Update `VERSION` to `{0}` in `src/{1}/setup.py`".format(mod_update_info.get("version", "-"), mod)) + else: + if mod_update_info.get("version", None): + bot_version = parse(mod_update_info['version']) + if mod_update_info.get("version_diff", None): + diff_version = parse(mod_update_info['version_diff']) + if diff_version != bot_version: + block_pr = 1 + mod_message.append(" - :warning: Please update `VERSION` to be `{0}` in `src/{1}/setup.py`".format(mod_update_info.get("version", "-"), mod)) + else: + mod_message.append(" - Update `VERSION` to `{0}` in `src/{1}/setup.py`".format(mod_update_info.get("version", "-"), mod)) + + +def gen_preview_comment_message(mod, mod_update_info, mod_message): + global block_pr + if mod_update_info.get("preview_tag", "-") == mod_update_info.get("preview_tag_diff", "-"): + return + preview_comment_message = " - " + if mod_update_info.get("version_diff", None): + block_pr = 1 + preview_comment_message += ":warning: " + if mod_update_info.get("preview_tag", None) and mod_update_info.get("preview_tag_diff", None): + if mod_update_info["preview_tag"] == "add" and mod_update_info["preview_tag_diff"] == "remove": + preview_comment_message += 'Set `azext.isPreview` to `true` in azext_metadata.json for {0}'.format(mod) + elif mod_update_info["preview_tag"] == "remove" and mod_update_info["preview_tag_diff"] == "add": + preview_comment_message += 'Remove `azext.isPreview: true` 
in azext_metadata.json for {0}'.format(mod) + elif not mod_update_info.get("preview_tag", None) and mod_update_info.get("preview_tag_diff", None): + if mod_update_info["preview_tag_diff"] == "add": + preview_comment_message += 'Remove `azext.isPreview: true` in azext_metadata.json for {0}'.format(mod) + elif mod_update_info["preview_tag_diff"] == "remove": + preview_comment_message += 'Set `azext.isPreview` to `true` in azext_metadata.json for {0}'.format(mod) + elif mod_update_info.get("preview_tag", None) and not mod_update_info.get("preview_tag_diff", None): + if mod_update_info["preview_tag"] == "add": + preview_comment_message += 'Set `azext.isPreview` to `true` in azext_metadata.json for {0}'.format(mod) + elif mod_update_info["preview_tag"] == "remove": + preview_comment_message += 'Remove `azext.isPreview: true` in azext_metadata.json for {0}'.format(mod) + mod_message.append(preview_comment_message) + + +def gen_exp_comment_message(mod, mod_update_info, mod_message): + global block_pr + if mod_update_info.get("exp_tag", "-") == mod_update_info.get("exp_tag_diff", "-"): + return + exp_comment_message = " - " + if mod_update_info.get("version_diff", None): + block_pr = 1 + exp_comment_message += ":warning: " + if mod_update_info.get("exp_tag", None) and mod_update_info.get("exp_tag_diff", None): + if mod_update_info["exp_tag"] == "remove" and mod_update_info["exp_tag_diff"] == "add": + exp_comment_message += 'Remove `azext.isExperimental: true` in azext_{0}/azext_metadata.json'.format(mod) + if mod_update_info["exp_tag"] == "add" and mod_update_info["exp_tag_diff"] == "remove": + exp_comment_message += 'Set `azext.isExperimental` to `true` in azext_metadata.json for {0}'.format(mod) + elif not mod_update_info.get("exp_tag", None) and mod_update_info.get("exp_tag_diff", None): + if mod_update_info["exp_tag_diff"] == "add": + exp_comment_message += 'Remove `azext.isExperimental: true` in azext_{0}/azext_metadata.json'.format(mod) + elif mod_update_info["exp_tag_diff"] == "remove": + exp_comment_message += 'Set `azext.isExperimental` to `true` in azext_metadata.json for {0}'.format(mod) + elif mod_update_info.get("exp_tag", None) and not mod_update_info.get("exp_tag_diff", None): + if mod_update_info["exp_tag"] == "add": + exp_comment_message += 'Set `azext.isExperimental` to `true` in azext_metadata.json for {0}'.format(mod) + elif mod_update_info["exp_tag"] == "remove": + exp_comment_message += 'Remove `azext.isExperimental: true` in azext_{0}/azext_metadata.json'.format(mod) + mod_message.append(exp_comment_message) + + +def gen_comment_message(mod, mod_update_info, comment_message): + mod_message = [] + gen_history_comment_message(mod, mod_update_info, mod_message) + gen_version_comment_message(mod, mod_update_info, mod_message) + gen_preview_comment_message(mod, mod_update_info, mod_message) + gen_exp_comment_message(mod, mod_update_info, mod_message) + if len(mod_message): + comment_message.append("### Module: {0}".format(mod)) + comment_message += mod_message + + +def add_label_hint_message(comment_message): + comment_message.append("#### Notes") + # comment_message.append(" - Stable/preview tag is inherited from last release. " + # "If needed, please add `stable`/`preview` label to modify it.") + # comment_message.append(" - Major/minor/patch/pre increment of version number is calculated by pull request " + # "code changes automatically. 
" + # "If needed, please add `major`/`minor`/`patch`/`pre` label to adjust it.") + comment_message.append(DEFAULT_MESSAGE) + + +def save_comment_message(comment_message): + with open(result_path + "/" + output_file, "w") as f: + for line in comment_message: + f.write(line + "\n") + + +def save_label_output(): + with open(os.environ['GITHUB_OUTPUT'], 'a') as fh: + print(f'BlockPR={block_pr}', file=fh) + add_label_dict = { + "labels": ["release-version-block"] + } + removed_label = "release-version-block" + if block_pr == 0: + with open(result_path + "/" + remove_labels_file, "w") as f: + f.write(removed_label + "\n") + else: + # add block label and empty release label file + with open(result_path + "/" + add_labels_file, "w") as f: + json.dump(add_label_dict, f) + with open(result_path + "/" + remove_labels_file, "w") as f: + pass + + +def main(): + print("Start calculate release version ...\n") + print("base_meta_path: ", base_meta_path) + print("diff_meta_path: ", diff_meta_path) + print("output_file: ", output_file) + print("changed_module_list: ", changed_module_list) + print("pr_label_list: ", pr_label_list) + comment_message = [] + modules_update_info = {} + if len(changed_module_list) == 0: + comment_message.append(DEFAULT_MESSAGE) + save_comment_message(comment_message) + save_label_output() + return + fill_module_update_info(modules_update_info) + if len(modules_update_info) == 0: + comment_message.append(DEFAULT_MESSAGE) + save_comment_message(comment_message) + save_label_output() + return + for mod, update_info in modules_update_info.items(): + gen_comment_message(mod, update_info, comment_message) + if len(comment_message): + add_suggest_header(comment_message) + add_label_hint_message(comment_message) + else: + comment_message.append(DEFAULT_MESSAGE) + print("comment_message:") + print(comment_message) + print("block_pr:", block_pr) + save_comment_message(comment_message) + save_label_output() + + +if __name__ == '__main__': + main() \ No newline at end of file diff --git a/src/scripts/ci/service_name.py b/src/scripts/ci/service_name.py new file mode 100644 index 00000000000..a0b6e16e7b9 --- /dev/null +++ b/src/scripts/ci/service_name.py @@ -0,0 +1,127 @@ +#!/usr/bin/env python + +# -------------------------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# -------------------------------------------------------------------------------------------- +""" +Check format of service_name.json. Command and AzureServiceName are required. Others are optional. +Each highest level command group should have reference in service_name.json. +""" +import json + +from azure.cli.core import MainCommandsLoader, AzCli +from azure.cli.core._help import AzCliHelp, CliCommandHelpFile +from azure.cli.core.commands import AzCliCommandInvoker, ExtensionCommandSource +from azure.cli.core.parser import AzCliCommandParser +from knack.help import GroupHelpFile + + +def get_extension_help_files(cli_ctx): + + # 1. Create invoker and load command table and arguments. Remember to turn off applicability check. 
+ invoker = cli_ctx.invocation_cls(cli_ctx=cli_ctx, commands_loader_cls=cli_ctx.commands_loader_cls, + parser_cls=cli_ctx.parser_cls, help_cls=cli_ctx.help_cls) + cli_ctx.invocation = invoker + + invoker.commands_loader.skip_applicability = True + cmd_table = invoker.commands_loader.load_command_table(None) + + # turn off applicability check for all loaders + for loaders in invoker.commands_loader.cmd_to_loader_map.values(): + for loader in loaders: + loader.skip_applicability = True + + # filter the command table to only get commands from extensions + cmd_table = {k: v for k, v in cmd_table.items() if isinstance(v.command_source, ExtensionCommandSource)} + invoker.commands_loader.command_table = cmd_table + print('FOUND {} command(s) from the extension.'.format(len(cmd_table))) + + for cmd_name in cmd_table: + invoker.commands_loader.load_arguments(cmd_name) + + invoker.parser.load_command_table(invoker.commands_loader) + + # 2. Now load applicable help files + parser_keys = [] + parser_values = [] + sub_parser_keys = [] + sub_parser_values = [] + _store_parsers(invoker.parser, parser_keys, parser_values, sub_parser_keys, sub_parser_values) + for cmd, parser in zip(parser_keys, parser_values): + if cmd not in sub_parser_keys: + sub_parser_keys.append(cmd) + sub_parser_values.append(parser) + help_ctx = cli_ctx.help_cls(cli_ctx=cli_ctx) + help_files = [] + for cmd, parser in zip(sub_parser_keys, sub_parser_values): + try: + help_file = GroupHelpFile(help_ctx, cmd, parser) if _is_group(parser) \ + else CliCommandHelpFile(help_ctx, cmd, parser) + help_file.load(parser) + help_files.append(help_file) + except Exception as ex: + print("Skipped '{}' due to '{}'".format(cmd, ex)) + help_files = sorted(help_files, key=lambda x: x.command) + return help_files + + +def _store_parsers(parser, parser_keys, parser_values, sub_parser_keys, sub_parser_values): + for s in parser.subparsers.values(): + parser_keys.append(_get_parser_name(s)) + parser_values.append(s) + if _is_group(s): + for c in s.choices.values(): + sub_parser_keys.append(_get_parser_name(c)) + sub_parser_values.append(c) + _store_parsers(c, parser_keys, parser_values, sub_parser_keys, sub_parser_values) + + +def _get_parser_name(s): + return (s._prog_prefix if hasattr(s, '_prog_prefix') else s.prog)[3:] + + +def _is_group(parser): + return getattr(parser, '_subparsers', None) is not None \ + or getattr(parser, 'choices', None) is not None + + +def check(): + az_cli = AzCli(cli_name='az', + commands_loader_cls=MainCommandsLoader, + invocation_cls=AzCliCommandInvoker, + parser_cls=AzCliCommandParser, + help_cls=AzCliHelp) + help_files = get_extension_help_files(az_cli) + # High command represents left most word in a command, e.g., vm, disk. 
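+    # e.g. "vm disk attach".split()[0] -> "vm"; every such high command must map to an entry in src/service_name.json shaped like (hypothetical values; only Command and AzureServiceName are required, other fields are optional): {"Command": "az vm", "AzureServiceName": "Virtual Machines"}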
+ high_command_set = set() + for help_file in help_files: + if help_file.command: + high_command_set.add(help_file.command.split()[0]) + print('high_command_set:') + print(high_command_set) + + # Load and check service_name.json + with open('src/service_name.json') as f: + service_names = json.load(f) + print('Verifying src/service_name.json') + service_name_map = {} + for service_name in service_names: + command = service_name['Command'] + service = service_name['AzureServiceName'] + if not command.startswith('az '): + raise Exception('{} does not start with az!'.format(command)) + if not service: + raise Exception('AzureServiceName of {} is empty!'.format(command)) + service_name_map[command[3:]] = service + print('service_name_map:') + print(service_name_map) + + # Check existence in service_name.json + for high_command in high_command_set: + if high_command not in service_name_map: + raise Exception('No entry of {} in service_name.json. Please add one to the file.'.format(high_command)) + + +if __name__ == "__main__": + check() diff --git a/src/scripts/ci/sync_extensions.py b/src/scripts/ci/sync_extensions.py new file mode 100644 index 00000000000..beec0213455 --- /dev/null +++ b/src/scripts/ci/sync_extensions.py @@ -0,0 +1,208 @@ +# -------------------------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# -------------------------------------------------------------------------------------------- +# pylint: disable=line-too-long +# pylint: disable=broad-except + +import os +import re +import json +import subprocess + +DEFAULT_TARGET_INDEX_URL = os.getenv('AZURE_EXTENSION_TARGET_INDEX_URL') +STORAGE_ACCOUNT = os.getenv('AZURE_EXTENSION_TARGET_STORAGE_ACCOUNT') +STORAGE_CONTAINER = os.getenv('AZURE_EXTENSION_TARGET_STORAGE_CONTAINER') +COMMIT_NUM = os.getenv('AZURE_EXTENSION_COMMIT_NUM') or 1 +BLOB_PREFIX = os.getenv('AZURE_EXTENSION_BLOB_PREFIX') + + +def _get_updated_extension_filenames(): + cmd = 'git --no-pager diff --diff-filter=ACMRT HEAD~{} -- src/index.json'.format(COMMIT_NUM) + updated_content = subprocess.check_output(cmd.split()).decode('utf-8') + FILENAME_REGEX = r'"filename":\s+"(.*?)"' + added_ext_filenames = {re.findall(FILENAME_REGEX, line)[0] for line in updated_content.splitlines() if line.startswith('+') and not line.startswith('+++') and 'filename' in line} + deleted_ext_filenames = {re.findall(FILENAME_REGEX, line)[0] for line in updated_content.splitlines() if line.startswith('-') and not line.startswith('---') and 'filename' in line} + return added_ext_filenames, deleted_ext_filenames + + +def download_file(url, file_path): + import requests + count = 3 + the_ex = None + while count > 0: + try: + response = requests.get(url, stream=True, allow_redirects=True) + assert response.status_code == 200, "Response code {}".format(response.status_code) + break + except Exception as ex: + the_ex = ex + count -= 1 + if count == 0: + msg = "Request for {} failed: {}".format(url, str(the_ex)) + print(msg) + raise Exception(msg) + + with open(file_path, 'wb') as f: + for chunk in response.iter_content(chunk_size=1024): + if chunk: # ignore keep-alive new chunks + f.write(chunk) + + +def _sync_wheel(ext, updated_indexes, failed_urls, overwrite, temp_dir): + download_url = ext['downloadUrl'] + whl_file = download_url.split('/')[-1] + whl_path = os.path.join(temp_dir, whl_file) + blob_name = 
f'{BLOB_PREFIX}/{whl_file}' if BLOB_PREFIX else whl_file + try: + download_file(download_url, whl_path) + except Exception: + failed_urls.append(download_url) + return + if not overwrite: + cmd = ['az', 'storage', 'blob', 'exists', '--container-name', f'{STORAGE_CONTAINER}', '--account-name', + f'{STORAGE_ACCOUNT}', '--name', f'{blob_name}', '--auth-mode', 'login'] + result = subprocess.run(cmd, capture_output=True) + if result.stdout and json.loads(result.stdout)['exists']: + print("Skipping '{}' as it already exists...".format(whl_file)) + return + + cmd = ['az', 'storage', 'blob', 'upload', '--container-name', f'{STORAGE_CONTAINER}', '--account-name', + f'{STORAGE_ACCOUNT}', '--name', f'{blob_name}', '--file', f'{os.path.abspath(whl_path)}', + '--auth-mode', 'login', '--overwrite'] + result = subprocess.run(cmd, capture_output=True) + if result.returncode != 0: + raise Exception(f"Failed to upload '{whl_file}' to the storage account") + cmd = ['az', 'storage', 'blob', 'url', '--container-name', f'{STORAGE_CONTAINER}', '--account-name', + f'{STORAGE_ACCOUNT}', '--name', f'{blob_name}', '--auth-mode', 'login'] + result = subprocess.run(cmd, capture_output=True) + print(result) + if result.stdout and result.returncode == 0: + url = json.loads(result.stdout) + else: + raise Exception("Failed to get the URL for '{}'".format(whl_file)) + updated_index = ext + updated_index['downloadUrl'] = url + updated_indexes.append(updated_index) + + +def _update_target_extension_index(updated_indexes, deleted_ext_filenames, target_index_path): + NAME_REGEX = r'^(.*?)-\d+.\d+.\d+' + with open(target_index_path, 'r') as infile: + curr_index = json.loads(infile.read()) + for entry in updated_indexes: + filename = entry['filename'] + extension_name = re.findall(NAME_REGEX, filename)[0].replace('_', '-') + if extension_name not in curr_index['extensions'].keys(): + print("Adding '{}' to index...".format(filename)) + curr_index['extensions'][extension_name] = [entry] + else: + print("Updating '{}' in index...".format(filename)) + curr_entry_idx = next((i for i, ext in enumerate(curr_index['extensions'][extension_name]) if ext['filename'] == entry['filename']), None) + if curr_entry_idx is not None: # in case of overwrite, replace the existing entry in place + curr_index['extensions'][extension_name][curr_entry_idx] = entry + else: + curr_index['extensions'][extension_name].append(entry) + for filename in deleted_ext_filenames: + extension_name = re.findall(NAME_REGEX, filename)[0].replace('_', '-') + print("Deleting '{}' in index...".format(filename)) + curr_index['extensions'][extension_name] = [ext for ext in curr_index['extensions'][extension_name] if ext['filename'] != filename] + if not curr_index['extensions'][extension_name]: + del curr_index['extensions'][extension_name] + + with open(os.path.join(target_index_path), 'w') as outfile: + outfile.write(json.dumps(curr_index, indent=4, sort_keys=True)) + + +def main(): + import shutil + import tempfile + + net_added_ext_filenames = [] + net_deleted_ext_filenames = [] + sync_all = (os.getenv('AZURE_SYNC_ALL_EXTENSIONS') and os.getenv('AZURE_SYNC_ALL_EXTENSIONS').lower() == 'true') + if not sync_all: + added_ext_filenames, deleted_ext_filenames = _get_updated_extension_filenames() + # when there are large amounts of changes, for instance deleting a lot of old versions of extensions, + # git may not accurately recognize the right changes, so we need to compare added and deleted filenames + # to get the real changed ones. 
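+        # Illustrative example with hypothetical filenames: added = {"foo-1.0.1-py3-none-any.whl", "foo-1.0.0-py3-none-any.whl"} and deleted = {"foo-1.0.0-py3-none-any.whl"} give net added = {"foo-1.0.1-py3-none-any.whl"} and net deleted = set(), since the 1.0.0 wheel merely moved within index.json rather than being added or removed.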
+ net_added_ext_filenames = added_ext_filenames - deleted_ext_filenames + net_deleted_ext_filenames = deleted_ext_filenames - added_ext_filenames + if not net_added_ext_filenames and not net_deleted_ext_filenames: + print('index.json not changed. End task.') + return + temp_dir = tempfile.mkdtemp() + with open('src/index.json', 'r') as fd: + current_extensions = json.loads(fd.read()).get("extensions") + + target_index = DEFAULT_TARGET_INDEX_URL + os.mkdir(os.path.join(temp_dir, 'target')) + target_index_path = os.path.join(temp_dir, 'target', 'index.json') + try: + download_file(target_index, target_index_path) + except Exception as ex: + if sync_all and '404' in str(ex): + initial_index = {"extensions": {}, "formatVersion": "1"} + open(target_index_path, 'w').write(json.dumps(initial_index, indent=4, sort_keys=True)) + else: + raise + updated_indexes = [] + failed_urls = [] + if sync_all: + print('Syncing all extensions...\n') + # backup the old index.json + backup_index_name = f'{BLOB_PREFIX}/index.json.sav' if BLOB_PREFIX else 'index.json.sav' + cmd = ['az', 'storage', 'blob', 'upload', '--container-name', f'{STORAGE_CONTAINER}', '--account-name', + f'{STORAGE_ACCOUNT}', '--name', f'{backup_index_name}', + '--file', f'{os.path.abspath(target_index_path)}', '--auth-mode', 'login', '--overwrite'] + result = subprocess.run(cmd, capture_output=True) + if result.returncode != 0: + raise Exception(f"Failed to upload '{target_index_path}' to the storage account") + # start with an empty index.json to sync all extensions + initial_index = {"extensions": {}, "formatVersion": "1"} + open(target_index_path, 'w').write(json.dumps(initial_index, indent=4, sort_keys=True)) + for extension_name in current_extensions.keys(): + for ext in current_extensions[extension_name]: + print('Uploading {}'.format(ext['filename'])) + _sync_wheel(ext, updated_indexes, failed_urls, True, temp_dir) + else: + NAME_REGEX = r'^(.*?)-\d+.\d+.\d+' + for filename in net_added_ext_filenames: + extension_name = re.findall(NAME_REGEX, filename)[0].replace('_', '-') + print('Uploading {}'.format(filename)) + ext = current_extensions[extension_name][-1] + if ext['filename'] != filename: + ext = next((ext for ext in current_extensions[extension_name] if ext['filename'] == filename), None) + if ext is not None: + _sync_wheel(ext, updated_indexes, failed_urls, True, temp_dir) + + print("") + _update_target_extension_index(updated_indexes, net_deleted_ext_filenames, target_index_path) + index_name = f'{BLOB_PREFIX}/index.json' if BLOB_PREFIX else 'index.json' + cmd = ['az', 'storage', 'blob', 'upload', '--container-name', f'{STORAGE_CONTAINER}', '--account-name', + f'{STORAGE_ACCOUNT}', '--name', f'{index_name}', '--file', f'{os.path.abspath(target_index_path)}', + '--auth-mode', 'login', '--overwrite'] + result = subprocess.run(cmd, capture_output=True) + if result.returncode != 0: + raise Exception(f"Failed to upload '{target_index_path}' to the storage account") + print("\nSync finished.") + if updated_indexes: + print("New extensions available in:") + for updated_index in updated_indexes: + print(updated_index['downloadUrl']) + shutil.rmtree(temp_dir) + + if failed_urls: + print("\nFailed to download and sync the following files. 
They are skipped:") + for url in failed_urls: + print(url) + print("") + raise Exception("Failed to sync some packages.") + + +if __name__ == '__main__': + main() diff --git a/src/scripts/ci/sync_extensions.sh b/src/scripts/ci/sync_extensions.sh new file mode 100644 index 00000000000..f47d2ace5da --- /dev/null +++ b/src/scripts/ci/sync_extensions.sh @@ -0,0 +1,8 @@ +#!/usr/bin/env bash + +set -ev +pip install requests + +echo $(pwd) + +python scripts/ci/sync_extensions.py diff --git a/src/scripts/ci/test_index.py b/src/scripts/ci/test_index.py new file mode 100644 index 00000000000..093ebaa7388 --- /dev/null +++ b/src/scripts/ci/test_index.py @@ -0,0 +1,217 @@ +#!/usr/bin/env python + +# -------------------------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# -------------------------------------------------------------------------------------------- + +""" Test the index and the wheels from both the index and from source extensions in repository """ + +from __future__ import print_function + +import glob +import hashlib +import json +import logging +import os +import shutil +import tempfile +import unittest + +from packaging import version +from util import SRC_PATH +from wheel.install import WHEEL_INFO_RE + +from util import get_ext_metadata, get_whl_from_url, get_index_data + + +logger = logging.getLogger(__name__) +logger.setLevel(logging.DEBUG) +ch = logging.StreamHandler() +ch.setLevel(logging.DEBUG) +logger.addHandler(ch) + + +def get_sha256sum(a_file): + sha256 = hashlib.sha256() + with open(a_file, 'rb') as f: + sha256.update(f.read()) + return sha256.hexdigest() + + +def check_min_version(extension_name, metadata): + if 'azext.minCliCoreVersion' not in metadata: + try: + azext_metadata = glob.glob(os.path.join(SRC_PATH, extension_name, 'azext_*', 'azext_metadata.json'))[0] + with open(azext_metadata, 'r') as f: + metadata = json.load(f) + if not metadata.get('azext.minCliCoreVersion'): + raise AssertionError(f'{extension_name} can not get azext.minCliCoreVersion') + except Exception as e: + logger.error(f'{extension_name} can not get azext.minCliCoreVersion: {e}') + raise e + + +class TestIndex(unittest.TestCase): + + @classmethod + def setUpClass(cls): + cls.longMessage = True + cls.index = get_index_data() + cls.whl_cache_dir = tempfile.mkdtemp() + cls.whl_cache = {} + + @classmethod + def tearDownClass(cls): + shutil.rmtree(cls.whl_cache_dir) + + def test_format_version(self): + self.assertEqual(self.index['formatVersion'], '1') + + def test_format_extensions_key(self): + self.assertIn('extensions', self.index) + + def test_format_extensions_value(self): + self.assertIsInstance(self.index['extensions'], dict) + + def test_extension_filenames(self): + for ext_name, exts in self.index['extensions'].items(): + self.assertEqual(ext_name.find('_'), -1, "Extension names should not contain underscores. " + "Found {}".format(ext_name)) + for item in exts: + self.assertTrue(item['filename'].endswith('.whl'), + "Filename {} must end with .whl".format(item['filename'])) + self.assertEqual(ext_name, item['metadata']['name'], + "Extension name mismatch in extensions['{}']. " + "Found an extension in the list with name " + "{}".format(ext_name, item['metadata']['name'])) + # Due to https://github.com/pypa/wheel/issues/235 we prevent whls built with 0.31.0 or greater. 
+ # 0.29.0, 0.30.0 are the two previous versions before that release. + parsed_filename = WHEEL_INFO_RE(item['filename']) + p = parsed_filename.groupdict() + self.assertTrue(p.get('name'), "Can't get name for {}".format(item['filename'])) + built_wheel = p.get('abi') == 'none' and p.get('plat') == 'any' + self.assertTrue(built_wheel, + "{} of {} not platform independent wheel. " + "It should end in -none-any.whl".format(item['filename'], ext_name)) + + def test_extension_url_filename(self): + for exts in self.index['extensions'].values(): + for item in exts: + self.assertEqual(os.path.basename(item['downloadUrl']), item['filename'], + "Filename must match last segment of downloadUrl") + + def test_extension_url_pypi(self): + for exts in self.index['extensions'].values(): + for item in exts: + url = item['downloadUrl'] + pypi_url_prefix = 'https://pypi.python.org/packages/' + pythonhosted_url_prefix = 'https://files.pythonhosted.org/packages/' + if url.startswith(pypi_url_prefix): + new_url = url.replace(pypi_url_prefix, pythonhosted_url_prefix) + hash_pos = new_url.find('#') + new_url = new_url if hash_pos == -1 else new_url[:hash_pos] + self.fail("Replace {} with {}\n" + "See for more info https://wiki.archlinux.org/index.php/Python_package_guidelines" + "#PyPI_download_URLs".format(url, new_url)) + + def test_filename_duplicates(self): + filenames = [] + for exts in self.index['extensions'].values(): + for item in exts: + filenames.append(item['filename']) + filename_seen = set() + dups = [] + for f in filenames: + if f in filename_seen: + dups.append(f) + filename_seen.add(f) + self.assertFalse(dups, "Duplicate filenames found {}".format(dups)) + + @unittest.skipUnless(os.getenv('CI'), 'Skipped as not running on CI') + def test_checksums(self): + for exts in self.index['extensions'].values(): + # only test the latest version + item = max(exts, key=lambda ext: version.parse(ext['metadata']['version'])) + ext_file = get_whl_from_url(item['downloadUrl'], item['filename'], + self.whl_cache_dir, self.whl_cache) + print(ext_file) + computed_hash = get_sha256sum(ext_file) + self.assertEqual(computed_hash, item['sha256Digest'], + "Computed {} but found {} in index for {}".format(computed_hash, + item['sha256Digest'], + item['filename'])) + + @unittest.skipUnless(os.getenv('CI'), 'Skipped as not running on CI') + def test_metadata(self): + skipable_extension_thresholds = { + 'ip-group': '0.1.2', + 'vm-repair': '0.3.1', + 'mixed-reality': '0.0.2', + 'subscription': '0.1.4', + 'managementpartner': '0.1.3', + 'log-analytics': '0.2.1' + } + + historical_extensions = { + 'keyvault-preview': '0.1.3', + 'log-analytics': '0.2.1' + } + + extensions_dir = tempfile.mkdtemp() + for ext_name, exts in self.index['extensions'].items(): + # only test the latest version + item = max(exts, key=lambda ext: version.parse(ext['metadata']['version'])) + ext_dir = tempfile.mkdtemp(dir=extensions_dir) + ext_file = get_whl_from_url(item['downloadUrl'], item['filename'], + self.whl_cache_dir, self.whl_cache) + + print(ext_file) + + ext_version = item['metadata']['version'] + try: + metadata = get_ext_metadata(ext_dir, ext_file, ext_name) # check file exists + except ValueError as ex: + if ext_name in skipable_extension_thresholds: + threshold_version = skipable_extension_thresholds[ext_name] + + if version.parse(ext_version) <= version.parse(threshold_version): + continue + else: + raise ex + else: + raise ex + + try: + # check key properties exists + check_min_version(ext_name, metadata) + except AssertionError as ex: + 
if ext_name in historical_extensions: + threshold_version = historical_extensions[ext_name] + + if version.parse(ext_version) <= version.parse(threshold_version): + continue + else: + raise ex + else: + raise ex + + # Due to https://github.com/pypa/wheel/issues/195 we prevent whls built with 0.31.0 or greater. + # 0.29.0, 0.30.0 are the two previous versions before that release. + supported_generators = ['bdist_wheel (0.29.0)', 'bdist_wheel (0.30.0)'] + self.assertIn(metadata.get('generator'), supported_generators, + "{}: 'generator' should be one of {}. " + "Build the extension with a different version of the 'wheel' package " + "(e.g. `pip install wheel==0.30.0`). " + "This is due to https://github.com/pypa/wheel/issues/195".format(ext_name, + supported_generators)) + self.assertDictEqual(metadata, item['metadata'], + "Metadata for {} in index doesn't match the expected of: \n" + "{}".format(item['filename'], json.dumps(metadata, indent=2, sort_keys=True, + separators=(',', ': ')))) + + shutil.rmtree(extensions_dir) + + +if __name__ == '__main__': + unittest.main() diff --git a/src/scripts/ci/test_index_ref_doc.sh b/src/scripts/ci/test_index_ref_doc.sh new file mode 100644 index 00000000000..5aa4815eb05 --- /dev/null +++ b/src/scripts/ci/test_index_ref_doc.sh @@ -0,0 +1,13 @@ +#!/usr/bin/env bash +set -ex + +# Install CLI +echo "Installing azure-cli..." + +pip install --pre azure-cli --extra-index-url https://azurecliprod.blob.core.windows.net/edge -q +pip install sphinx==1.7.0 Jinja2==3.0.3 +echo "Installed." + +python ./scripts/ci/index_ref_doc.py -v + +echo "OK." diff --git a/src/scripts/ci/test_init.py b/src/scripts/ci/test_init.py new file mode 100644 index 00000000000..74cb45c9a26 --- /dev/null +++ b/src/scripts/ci/test_init.py @@ -0,0 +1,65 @@ +#!/usr/bin/env python + +# -------------------------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. 
+# -------------------------------------------------------------------------------------------- + +from util import SRC_PATH +import logging +import os +import sys + +logger = logging.getLogger(__name__) +logger.setLevel(logging.DEBUG) +ch = logging.StreamHandler() +ch.setLevel(logging.DEBUG) +logger.addHandler(ch) + + +check_path = 'vendored_sdks' + + +def check_init_files(): + """ Check that the vendored_sdks directory contains __init__.py in all extensions """ + ref = [] + # SRC_PATH: azure-cli-extensions\src + for src_d in os.listdir(SRC_PATH): + # src_d: azure-cli-extensions\src\ext_name + src_d_full = os.path.join(SRC_PATH, src_d) + if os.path.isdir(src_d_full): + for d in os.listdir(src_d_full): + if d.startswith('azext_'): + # root_dir: azure-cli-extensions\src\ext_name\azext_ext_name + ref.append(check_init_recursive(os.path.join(src_d_full, d))) + return ref + + +def check_init_recursive(root_dir): + """ Check if an extension contains __init__.py + :param root_dir: azure-cli-extensions\src\{ext_name}\azext_{ext_name} + :param dirpath: azure-cli-extensions\src\{ext_name}\azext_{ext_name} + :param dirnames: all directories under dirpath, type: List[str] + :param filenames: all files under dirpath, type: List[str] + """ + error_flag = False + for (dirpath, dirnames, filenames) in os.walk(root_dir): + if dirpath.endswith(check_path): + # Error if __init__.py is missing from a vendored_sdks dir that contains at least one file + if '__init__.py' not in filenames and not is_empty_dir(dirpath): + logger.error(f'Directory {dirpath} does not contain __init__.py, please add it.') + error_flag = True + return error_flag + + +def is_empty_dir(root_dir): + """ Check if the directory does not contain any files """ + for (dirpath, dirnames, filenames) in os.walk(root_dir): + if filenames: + return False + return True + + +if __name__ == '__main__': + ref = check_init_files() + sys.exit(1) if any(ref) else sys.exit(0) diff --git a/src/scripts/ci/test_source.py b/src/scripts/ci/test_source.py new file mode 100644 index 00000000000..94ddf5bafd7 --- /dev/null +++ b/src/scripts/ci/test_source.py @@ -0,0 +1,107 @@ +#!/usr/bin/env python + +# -------------------------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. 
+# -------------------------------------------------------------------------------------------- + +from __future__ import print_function + +import logging +import os +import sys +import tempfile +import shutil +import shlex + +from subprocess import check_output, CalledProcessError, run +from util import SRC_PATH + +logger = logging.getLogger(__name__) + +ALL_TESTS = [] + +for src_d in os.listdir(SRC_PATH): + src_d_full = os.path.join(SRC_PATH, src_d) + if not os.path.isdir(src_d_full): + continue + pkg_name = next((d for d in os.listdir(src_d_full) if d.startswith('azext_')), None) + + # If running in Travis CI, only run tests for edited extensions + commit_range = os.environ.get('TRAVIS_COMMIT_RANGE') + if commit_range and not check_output(['git', '--no-pager', 'diff', '--name-only', commit_range, '--', src_d_full]): + continue + + # Running in Azure DevOps + cmd_tpl = 'git --no-pager diff --name-only origin/{commit_start} {commit_end} -- {code_dir}' + ado_branch_last_commit = os.environ.get('ADO_PULL_REQUEST_LATEST_COMMIT') + ado_target_branch = os.environ.get('ADO_PULL_REQUEST_TARGET_BRANCH') + if ado_branch_last_commit and ado_target_branch: + if ado_branch_last_commit == '$(System.PullRequest.SourceCommitId)': + # default value if ADO_PULL_REQUEST_LATEST_COMMIT not set in ADO + continue + elif ado_target_branch == '$(System.PullRequest.TargetBranch)': + # default value if ADO_PULL_REQUEST_TARGET_BRANCH not set in ADO + continue + else: + cmd = cmd_tpl.format(commit_start=ado_target_branch, commit_end=ado_branch_last_commit, code_dir=src_d_full) + if not check_output(shlex.split(cmd)): + continue + + # Find the package and check it has tests + if pkg_name and os.path.isdir(os.path.join(src_d_full, pkg_name, 'tests')): + ALL_TESTS.append((pkg_name, src_d_full)) + +logger.warning(f'ado_branch_last_commit: {ado_branch_last_commit}, ' + f'ado_target_branch: {ado_target_branch}, ' + f'ALL_TESTS: {ALL_TESTS}.') + + +def run_command(cmd, check_return_code=False, cwd=None): + logger.info(f'cmd: {cmd}') + out = run(cmd, check=True, cwd=cwd) + if check_return_code and out.returncode: + raise RuntimeError(f"{cmd} failed") + + +def test_extension(): + for pkg_name, ext_path in ALL_TESTS: + ext_name = ext_path.split('/')[-1] + logger.info(f'installing extension: {ext_name}') + cmd = ['azdev', 'extension', 'add', ext_name] + run_command(cmd, check_return_code=True) + + # Use azext_$ext_name, a unique long name for testing, to avoid the following error when the main module and extension name have the same name: + # 'containerapp' exists in both 'azext_containerapp' and 'containerapp'. Resolve using `azext_containerapp.containerapp` or `containerapp.containerapp` + # 'containerapp' not found. If newly added, re-run with --discover + # No tests selected to run. 
+ # ---------------------------------------------------------------------- + # For the recommended azdev test example, please refer to: `azdev test --help` + # `python -m azdev test --no-exitfirst --discover --verbose azext_containerapp` + test_args = [sys.executable, '-m', 'azdev', 'test', '--no-exitfirst', '--discover', '--verbose', pkg_name] + logger.warning(f'test_args: {test_args}') + + run_command(test_args, check_return_code=True) + logger.info(f'uninstalling extension: {ext_name}') + cmd = ['azdev', 'extension', 'remove', ext_name] + run_command(cmd, check_return_code=True) + + +def test_source_wheels(): + # Test we can build all sources into wheels and that metadata from the wheel is valid + built_whl_dir = tempfile.mkdtemp() + source_extensions = [os.path.join(SRC_PATH, n) for n in os.listdir(SRC_PATH) + if os.path.isdir(os.path.join(SRC_PATH, n))] + for s in source_extensions: + if not os.path.isfile(os.path.join(s, 'setup.py')): + continue + try: + check_output(['python', 'setup.py', 'bdist_wheel', '-q', '-d', built_whl_dir], cwd=s) + except CalledProcessError as err: + raise RuntimeError("Unable to build extension {}: {}".format(s, err)) + shutil.rmtree(built_whl_dir) + + +if __name__ == '__main__': + test_extension() + test_source_wheels() diff --git a/src/scripts/ci/update_ext_cmd_tree.py b/src/scripts/ci/update_ext_cmd_tree.py new file mode 100644 index 00000000000..6480d073a62 --- /dev/null +++ b/src/scripts/ci/update_ext_cmd_tree.py @@ -0,0 +1,112 @@ +# -------------------------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# -------------------------------------------------------------------------------------------- + +import filecmp +import json +import os +import subprocess +import sys +from azure.cli.core import get_default_cli +from azure.cli.core._session import Session +from azure.cli.core.commands import _load_extension_command_loader +from azure.cli.core.extension import get_extension_modname, get_extension_path +from sync_extensions import download_file + +STORAGE_ACCOUNT = os.getenv('AZURE_EXTENSION_CMD_TREE_STORAGE_ACCOUNT') +STORAGE_CONTAINER = os.getenv('AZURE_EXTENSION_CMD_TREE_STORAGE_CONTAINER') +BLOB_PREFIX = os.getenv('AZURE_EXTENSION_CMD_TREE_BLOB_PREFIX') + +az_cli = get_default_cli() +file_name = 'extCmdTreeToUpload.json' + + +def merge(data, key, value): + if isinstance(value, str): + if key in data: + raise Exception(f"Key: {key} already exists in {data[key]}. 
2 extensions cannot have the same command!") + data[key] = value + else: + data.setdefault(key, {}) + for k, v in value.items(): + merge(data[key], k, v) + + +def update_cmd_tree(ext_name): + print(f"Processing {ext_name}") + + ext_dir = get_extension_path(ext_name) + ext_mod = get_extension_modname(ext_name, ext_dir=ext_dir) + + invoker = az_cli.invocation_cls(cli_ctx=az_cli, commands_loader_cls=az_cli.commands_loader_cls, + parser_cls=az_cli.parser_cls, help_cls=az_cli.help_cls) + az_cli.invocation = invoker + + sys.path.append(ext_dir) + extension_command_table, _ = _load_extension_command_loader(invoker.commands_loader, None, ext_mod) + + EXT_CMD_TREE_TO_UPLOAD = Session(encoding='utf-8') + EXT_CMD_TREE_TO_UPLOAD.load(os.path.expanduser(os.path.join('~', '.azure', file_name))) + root = {} + for cmd_name, ext_cmd in extension_command_table.items(): + try: + # do not include hidden deprecated command + if ext_cmd.deprecate_info.hide: + print(f"Skip hidden deprecated command: {cmd_name}") + continue + except AttributeError: + pass + parts = cmd_name.split() + parent = root + for i, part in enumerate(parts): + if part in parent: + pass + elif i == len(parts) - 1: + parent[part] = ext_name + else: + parent[part] = {} + parent = parent[part] + print(root) + for k, v in root.items(): + merge(EXT_CMD_TREE_TO_UPLOAD.data, k, v) + EXT_CMD_TREE_TO_UPLOAD.save_with_retry() + + +def upload_cmd_tree(): + blob_file_name = 'extensionCommandTree.json' + if BLOB_PREFIX: + blob_file_name = f'{BLOB_PREFIX}/{blob_file_name}' + downloaded_file_name = 'extCmdTreeDownloaded.json' + file_path = os.path.expanduser(os.path.join('~', '.azure', file_name)) + + cmd = ['az', 'storage', 'blob', 'upload', '--container-name', f'{STORAGE_CONTAINER}', '--account-name', + f'{STORAGE_ACCOUNT}', '--name', f'{blob_file_name}', '--file', f'{file_path}', '--auth-mode', 'login', + '--overwrite'] + result = subprocess.run(cmd, capture_output=True) + if result.returncode != 0: + print(f"Failed to upload '{blob_file_name}' to the storage account") + print(result) + + cmd = ['az', 'storage', 'blob', 'url', '--container-name', f'{STORAGE_CONTAINER}', '--account-name', + f'{STORAGE_ACCOUNT}', '--name', f'{blob_file_name}', '--auth-mode', 'login'] + result = subprocess.run(cmd, capture_output=True) + if result.stdout and result.returncode == 0: + url = json.loads(result.stdout) + else: + raise Exception(f"Failed to get the URL for '{blob_file_name}'") + + download_file_path = os.path.expanduser(os.path.join('~', '.azure', downloaded_file_name)) + download_file(url, download_file_path) + if filecmp.cmp(file_path, download_file_path): + print("extensionCommandTree.json uploaded successfully. URL: {}".format(url)) + else: + raise Exception("Failed to update extensionCommandTree.json in the storage account") + + +if __name__ == '__main__': + for ext in sys.argv[1:]: + update_cmd_tree(ext) + print() + upload_cmd_tree() diff --git a/src/scripts/ci/update_index.py b/src/scripts/ci/update_index.py new file mode 100644 index 00000000000..28f10f99177 --- /dev/null +++ b/src/scripts/ci/update_index.py @@ -0,0 +1,68 @@ +# -------------------------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. 
+# -------------------------------------------------------------------------------------------- + +import hashlib +import json +import re +import sys +import tempfile + +from util import get_ext_metadata, get_whl_from_url + +NAME_REGEX = r'.*/([^/]*)-\d+.\d+.\d+' + + +def get_sha256sum(a_file): + sha256 = hashlib.sha256() + with open(a_file, 'rb') as f: + sha256.update(f.read()) + return sha256.hexdigest() + + +def main(): + + # Get extension WHL from URL + whl_path = None + try: + whl_path = sys.argv[1] + except IndexError: + pass + if not whl_path or not whl_path.endswith('.whl') or not whl_path.startswith('https:'): + raise ValueError('incorrect usage: update_index.py <URL to an extension .whl>') + + # Extract the extension name + try: + extension_name = re.findall(NAME_REGEX, whl_path)[0] + extension_name = extension_name.replace('_', '-') + except IndexError: + raise ValueError('unable to parse extension name') + + extensions_dir = tempfile.mkdtemp() + ext_dir = tempfile.mkdtemp(dir=extensions_dir) + whl_cache_dir = tempfile.mkdtemp() + whl_cache = {} + ext_file = get_whl_from_url(whl_path, extension_name, whl_cache_dir, whl_cache) + + with open('./src/index.json', 'r') as infile: + curr_index = json.loads(infile.read()) + + try: + entry = curr_index['extensions'][extension_name] + except KeyError: + raise ValueError('{} not found in index.json'.format(extension_name)) + + entry[0]['downloadUrl'] = whl_path + entry[0]['sha256Digest'] = get_sha256sum(ext_file) + entry[0]['filename'] = whl_path.split('/')[-1] + entry[0]['metadata'] = get_ext_metadata(ext_dir, ext_file, extension_name) + + # update index and write back to file + curr_index['extensions'][extension_name] = entry + with open('./src/index.json', 'w') as outfile: + outfile.write(json.dumps(curr_index, indent=4, sort_keys=True)) + + +if __name__ == '__main__': + main() diff --git a/src/scripts/ci/util.py b/src/scripts/ci/util.py new file mode 100644 index 00000000000..ffc7d54797b --- /dev/null +++ b/src/scripts/ci/util.py @@ -0,0 +1,165 @@ +# -------------------------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# -------------------------------------------------------------------------------------------- + +import logging +import os +import re +import shlex +import json +import zipfile + +from subprocess import check_output + +logger = logging.getLogger(__name__) + +# copy from wheel==0.30.0 +WHEEL_INFO_RE = re.compile( + r"""^(?P<namever>(?P<name>.+?)(-(?P<ver>\d.+?))?) + ((-(?P<build>\d.*?))?-(?P<pyver>.+?)-(?P<abi>.+?)-(?P<plat>.+?) 
+ \.whl|\.dist-info)$""", + re.VERBOSE).match + + +def get_repo_root(): + current_dir = os.path.dirname(os.path.abspath(__file__)) + while not os.path.exists(os.path.join(current_dir, 'CONTRIBUTING.rst')): + current_dir = os.path.dirname(current_dir) + return current_dir + + +def _get_extension_modname(ext_dir): + # Modification of https://github.com/Azure/azure-cli/blob/dev/src/azure-cli-core/azure/cli/core/extension.py#L153 + EXTENSIONS_MOD_PREFIX = 'azext_' + pos_mods = [n for n in os.listdir(ext_dir) + if n.startswith(EXTENSIONS_MOD_PREFIX) and os.path.isdir(os.path.join(ext_dir, n))] + if len(pos_mods) != 1: + raise AssertionError("Expected 1 module to load starting with " + "'{}': got {}".format(EXTENSIONS_MOD_PREFIX, pos_mods)) + return pos_mods[0] + + +def _get_azext_metadata(ext_dir): + # Modification of https://github.com/Azure/azure-cli/blob/dev/src/azure-cli-core/azure/cli/core/extension.py#L109 + AZEXT_METADATA_FILENAME = 'azext_metadata.json' + azext_metadata = None + ext_modname = _get_extension_modname(ext_dir=ext_dir) + azext_metadata_filepath = os.path.join(ext_dir, ext_modname, AZEXT_METADATA_FILENAME) + if os.path.isfile(azext_metadata_filepath): + with open(azext_metadata_filepath) as f: + azext_metadata = json.load(f) + return azext_metadata + + +def get_ext_metadata(ext_dir, ext_file, ext_name): + # Modification of https://github.com/Azure/azure-cli/blob/dev/src/azure-cli-core/azure/cli/core/extension.py#L89 + WHL_METADATA_FILENAME = 'metadata.json' + zip_ref = zipfile.ZipFile(ext_file, 'r') + zip_ref.extractall(ext_dir) + zip_ref.close() + metadata = {} + dist_info_dirs = [f for f in os.listdir(ext_dir) if f.endswith('.dist-info')] + + azext_metadata = _get_azext_metadata(ext_dir) + + if not azext_metadata: + raise ValueError('azext_metadata.json for Extension "{}" Metadata is missing'.format(ext_name)) + + metadata.update(azext_metadata) + + for dist_info_dirname in dist_info_dirs: + parsed_dist_info_dir = WHEEL_INFO_RE(dist_info_dirname) + if parsed_dist_info_dir and parsed_dist_info_dir.groupdict().get('name') == ext_name.replace('-', '_'): + whl_metadata_filepath = os.path.join(ext_dir, dist_info_dirname, WHL_METADATA_FILENAME) + if os.path.isfile(whl_metadata_filepath): + with open(whl_metadata_filepath) as f: + metadata.update(json.load(f)) + return metadata + + +def get_whl_from_url(url, filename, tmp_dir, whl_cache=None): + if not whl_cache: + whl_cache = {} + if url in whl_cache: + return whl_cache[url] + import requests + TRIES = 3 + for try_number in range(TRIES): + try: + r = requests.get(url, stream=True) + assert r.status_code == 200, "Request to {} failed with {}".format(url, r.status_code) + break + except (requests.exceptions.ConnectionError, requests.exceptions.HTTPError, AssertionError): + # retry on transient connection errors and bad status codes + import time + time.sleep(0.5) + else: + # all retries exhausted without a successful response + raise RuntimeError("Failed to download {} after {} attempts".format(url, TRIES)) + + ext_file = os.path.join(tmp_dir, filename) + with open(ext_file, 'wb') as f: + for chunk in r.iter_content(chunk_size=1024): + if chunk: # ignore keep-alive new chunks + f.write(chunk) + whl_cache[url] = ext_file + return ext_file + + +SRC_PATH = os.path.join(get_repo_root(), 'src') +INDEX_PATH = os.path.join(SRC_PATH, 'index.json') + + +def _catch_dup_keys(pairs): + seen = {} + for k, v in pairs: + if k in seen: + raise ValueError("duplicate key {}".format(k)) + seen[k] = v + return seen + + +def get_index_data(): + try: + with open(INDEX_PATH) as f: + return json.load(f, object_pairs_hook=_catch_dup_keys) + except ValueError as err: + raise AssertionError("Invalid JSON in {}: {}".format(INDEX_PATH, err)) + + +def 
diff_code(start, end): + diff_ref = [] + + for src_d in os.listdir(SRC_PATH): + src_d_full = os.path.join(SRC_PATH, src_d) + if not os.path.isdir(src_d_full): + continue + pkg_name = next((d for d in os.listdir(src_d_full) if d.startswith('azext_')), None) + + # If running in Travis CI, only run tests for edited extensions + commit_range = os.environ.get('TRAVIS_COMMIT_RANGE') + if commit_range and not check_output( + ['git', '--no-pager', 'diff', '--name-only', commit_range, '--', src_d_full]): + continue + + # Running in Azure DevOps + cmd_tpl = 'git --no-pager diff --name-only origin/{start} {end} -- {code_dir}' + # ado_branch_last_commit = os.environ.get('ADO_PULL_REQUEST_LATEST_COMMIT') + # ado_target_branch = os.environ.get('ADO_PULL_REQUEST_TARGET_BRANCH') + if start and end: + if end == '$(System.PullRequest.SourceCommitId)': + # default value if ADO_PULL_REQUEST_LATEST_COMMIT not set in ADO + continue + elif start == '$(System.PullRequest.TargetBranch)': + # default value if ADO_PULL_REQUEST_TARGET_BRANCH not set in ADO + continue + else: + cmd = cmd_tpl.format(start=start, end=end, + code_dir=src_d_full) + if not check_output(shlex.split(cmd)): + continue + + diff_ref.append((pkg_name, src_d_full)) + + logger.warning(f'start: {start}, ' + f'end: {end}, ' + f'diff_ref: {diff_ref}.') + return diff_ref diff --git a/src/scripts/ci/verify_codeowners.py b/src/scripts/ci/verify_codeowners.py new file mode 100644 index 00000000000..e56b16c6537 --- /dev/null +++ b/src/scripts/ci/verify_codeowners.py @@ -0,0 +1,42 @@ +# -------------------------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# -------------------------------------------------------------------------------------------- + +from __future__ import print_function + +import os +import sys + +from util import get_repo_root + +REPO_ROOT = get_repo_root() +CODEOWNERS = os.path.join(REPO_ROOT, '.github', 'CODEOWNERS') +SRC_DIR = os.path.join(REPO_ROOT, 'src') + + +def get_src_dir_codeowners(): + contents = [] + with open(CODEOWNERS) as f: + contents = [x.strip() for x in f.readlines()] + return dict([x.split(' ', 1) for x in contents if x.startswith('/src/') and x.split(' ')[0].endswith('/')]) + + +def main(): + owners = get_src_dir_codeowners() + dangling_entries = [e for e in owners if not os.path.isdir(os.path.join(REPO_ROOT, e[1:]))] + missing_entries = ['/src/{}/'.format(p) for p in os.listdir(SRC_DIR) + if os.path.isdir(os.path.join(SRC_DIR, p)) and '/src/{}/'.format(p) not in owners] + if dangling_entries or missing_entries: + print('Errors whilst verifying {}!'.format(CODEOWNERS)) + if dangling_entries: + print("Remove the following {} as these directories don't exist.".format(dangling_entries), + file=sys.stderr) + if missing_entries: + print("The following directories are missing codeowners {}.".format(missing_entries), + file=sys.stderr) + sys.exit(1) + + +if __name__ == '__main__': + main() diff --git a/src/scripts/refdoc/README.md b/src/scripts/refdoc/README.md new file mode 100644 index 00000000000..ab71c0bc0cd --- /dev/null +++ b/src/scripts/refdoc/README.md @@ -0,0 +1,26 @@ +# Ref doc gen # + +Scripts for reference documentation generation for Azure CLI Extensions using [sphinx](http://www.sphinx-doc.org/en/master/) + +# How to generate the Sphinx help file output # + +## Set up environment ## + +1. 
Ensure the CLI is installed in your Python virtual environment. +2. Inside the Python virtual environment, run `pip install sphinx==1.7.0` + +## Run Sphinx ## + +1. Run the generate script `python scripts/refdoc/generate.py -e PATH_TO_WHL.whl` + +## Retrieve output ## + +1. By default, the XML output is stored in `ref-doc-out-*/ind.xml` + +## Generating Sphinx output for the latest versions of all extensions in index ## + +1. Ensure the CLI is installed in your Python virtual environment. +2. Inside the Python virtual environment, run `pip install sphinx==1.7.0` +3. Set the environment variable `AZ_EXT_REF_DOC_OUT_DIR` to an empty directory that exists. +4. Run the following script to generate sphinx output for the latest versions of all extensions in the index - `python ./scripts/ci/index_ref_doc.py -v` +5. The sphinx output will be in the directory pointed to by the `AZ_EXT_REF_DOC_OUT_DIR` environment variable. \ No newline at end of file diff --git a/src/scripts/refdoc/azhelpgen/__init__.py b/src/scripts/refdoc/azhelpgen/__init__.py new file mode 100644 index 00000000000..34913fb394d --- /dev/null +++ b/src/scripts/refdoc/azhelpgen/__init__.py @@ -0,0 +1,4 @@ +# -------------------------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# -------------------------------------------------------------------------------------------- diff --git a/src/scripts/refdoc/azhelpgen/azhelpgen.py b/src/scripts/refdoc/azhelpgen/azhelpgen.py new file mode 100644 index 00000000000..9daadc7d1e5 --- /dev/null +++ b/src/scripts/refdoc/azhelpgen/azhelpgen.py @@ -0,0 +1,190 @@ +# -------------------------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# -------------------------------------------------------------------------------------------- + +import argparse +import json +from os.path import expanduser +from docutils import nodes +from docutils.statemachine import ViewList +from docutils.parsers.rst import Directive +from sphinx.util.nodes import nested_parse_with_titles + +from knack.help_files import helps + +from knack.help import GroupHelpFile +from azure.cli.core import MainCommandsLoader, AzCli +from azure.cli.core.commands import AzCliCommandInvoker, ExtensionCommandSource +from azure.cli.core.parser import AzCliCommandParser +from azure.cli.core._help import AzCliHelp, CliCommandHelpFile, ArgumentGroupRegistry + +USER_HOME = expanduser('~') + + +def get_extension_help_files(cli_ctx): + + # 1. Create invoker and load command table and arguments. Remember to turn off applicability check. 
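+    # Note: cli_ctx is the AzCli instance constructed in AzHelpGenDirective.make_rst
+    # below; the full command table is loaded first and then filtered down to commands
+    # whose command_source is an ExtensionCommandSource, mirroring a normal 'az' startup.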
+ invoker = cli_ctx.invocation_cls(cli_ctx=cli_ctx, commands_loader_cls=cli_ctx.commands_loader_cls, + parser_cls=cli_ctx.parser_cls, help_cls=cli_ctx.help_cls) + cli_ctx.invocation = invoker + + invoker.commands_loader.skip_applicability = True + cmd_table = invoker.commands_loader.load_command_table(None) + + # turn off applicability check for all loaders + for loaders in invoker.commands_loader.cmd_to_loader_map.values(): + for loader in loaders: + loader.skip_applicability = True + + # filter the command table to only get commands from extensions + cmd_table = {k: v for k, v in cmd_table.items() if isinstance(v.command_source, ExtensionCommandSource)} + invoker.commands_loader.command_table = cmd_table + print('FOUND {} command(s) from the extension.'.format(len(cmd_table))) + + for cmd_name in cmd_table: + invoker.commands_loader.load_arguments(cmd_name) + + invoker.parser.load_command_table(invoker.commands_loader) + + # 2. Now load applicable help files + parser_keys = [] + parser_values = [] + sub_parser_keys = [] + sub_parser_values = [] + _store_parsers(invoker.parser, parser_keys, parser_values, sub_parser_keys, sub_parser_values) + for cmd, parser in zip(parser_keys, parser_values): + if cmd not in sub_parser_keys: + sub_parser_keys.append(cmd) + sub_parser_values.append(parser) + help_ctx = cli_ctx.help_cls(cli_ctx=cli_ctx) + help_files = [] + for cmd, parser in zip(sub_parser_keys, sub_parser_values): + try: + help_file = GroupHelpFile(help_ctx, cmd, parser) if _is_group(parser) \ + else CliCommandHelpFile(help_ctx, cmd, parser) + help_file.load(parser) + help_files.append(help_file) + except Exception as ex: + print("Skipped '{}' due to '{}'".format(cmd, ex)) + help_files = sorted(help_files, key=lambda x: x.command) + return help_files + +class AzHelpGenDirective(Directive): + def make_rst(self): + INDENT = ' ' + DOUBLEINDENT = INDENT * 2 + + az_cli = AzCli(cli_name='az', + commands_loader_cls=MainCommandsLoader, + invocation_cls=AzCliCommandInvoker, + parser_cls=AzCliCommandParser, + help_cls=AzCliHelp) + help_files = get_extension_help_files(az_cli) + + for help_file in help_files: + is_command = isinstance(help_file, CliCommandHelpFile) + yield '.. cli{}:: {}'.format('command' if is_command else 'group', help_file.command if help_file.command else 'az') #it is top level group az if command is empty + yield '' + yield '{}:summary: {}'.format(INDENT, help_file.short_summary) + yield '{}:description: {}'.format(INDENT, help_file.long_summary) + if help_file.deprecate_info: + yield '{}:deprecated: {}'.format(INDENT, help_file.deprecate_info._get_message(help_file.deprecate_info)) + yield '' + + if is_command and help_file.parameters: + group_registry = ArgumentGroupRegistry([p.group_name for p in help_file.parameters if p.group_name]) + + for arg in sorted(help_file.parameters, + key=lambda p: group_registry.get_group_priority(p.group_name) + + str(not p.required) + p.name): + yield '{}.. 
cliarg:: {}'.format(INDENT, arg.name) + yield '' + yield '{}:required: {}'.format(DOUBLEINDENT, arg.required) + if arg.deprecate_info: + yield '{}:deprecated: {}'.format(DOUBLEINDENT, arg.deprecate_info._get_message(arg.deprecate_info)) + short_summary = arg.short_summary or '' + possible_values_index = short_summary.find(' Possible values include') + short_summary = short_summary[0:possible_values_index + if possible_values_index >= 0 else len(short_summary)] + short_summary = short_summary.strip() + yield '{}:summary: {}'.format(DOUBLEINDENT, short_summary) + yield '{}:description: {}'.format(DOUBLEINDENT, arg.long_summary) + if arg.choices: + yield '{}:values: {}'.format(DOUBLEINDENT, ', '.join(sorted([str(x) for x in arg.choices]))) + if arg.default and arg.default != argparse.SUPPRESS: + try: + if arg.default.startswith(USER_HOME): + arg.default = arg.default.replace(USER_HOME, '~').replace('\\', '/') + except Exception: + pass + try: + arg.default = arg.default.replace("\\", "\\\\") + except Exception: + pass + yield '{}:default: {}'.format(DOUBLEINDENT, arg.default) + if arg.value_sources: + yield '{}:source: {}'.format(DOUBLEINDENT, ', '.join(_get_populator_commands(arg))) + yield '' + yield '' + if len(help_file.examples) > 0: + for e in help_file.examples: + fields = _get_example_fields(e) + yield '{}.. cliexample:: {}'.format(INDENT, fields['summary']) + yield '' + yield DOUBLEINDENT + fields['command'].replace("\\", "\\\\") + yield '' + + def run(self): + node = nodes.section() + node.document = self.state.document + result = ViewList() + for line in self.make_rst(): + result.append(line, '') + + nested_parse_with_titles(self.state, result, node) + return node.children + +def setup(app): + app.add_directive('azhelpgen', AzHelpGenDirective) + + +def _store_parsers(parser, parser_keys, parser_values, sub_parser_keys, sub_parser_values): + for s in parser.subparsers.values(): + parser_keys.append(_get_parser_name(s)) + parser_values.append(s) + if _is_group(s): + for c in s.choices.values(): + sub_parser_keys.append(_get_parser_name(c)) + sub_parser_values.append(c) + _store_parsers(c, parser_keys, parser_values, sub_parser_keys, sub_parser_values) + +def _is_group(parser): + return getattr(parser, '_subparsers', None) is not None \ + or getattr(parser, 'choices', None) is not None + +def _get_parser_name(s): + return (s._prog_prefix if hasattr(s, '_prog_prefix') else s.prog)[3:] + + +def _get_populator_commands(param): + commands = [] + for value_source in param.value_sources: + try: + commands.append(value_source["link"]["command"]) + except TypeError: # old value_sources are strings + commands.append(value_source) + except KeyError: # new value_sources are dicts + continue + return commands + +def _get_example_fields(ex): + res = {} + try: + res['summary'] = ex.short_summary + res['command'] = ex.command + except AttributeError: + res['summary'] = ex.name + res['command'] = ex.text + + return res \ No newline at end of file diff --git a/src/scripts/refdoc/cligroup/__init__.py b/src/scripts/refdoc/cligroup/__init__.py new file mode 100644 index 00000000000..34913fb394d --- /dev/null +++ b/src/scripts/refdoc/cligroup/__init__.py @@ -0,0 +1,4 @@ +# -------------------------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. 
+# --------------------------------------------------------------------------------------------
diff --git a/src/scripts/refdoc/cligroup/cligroup.py b/src/scripts/refdoc/cligroup/cligroup.py
new file mode 100644
index 00000000000..49d5450f5bc
--- /dev/null
+++ b/src/scripts/refdoc/cligroup/cligroup.py
@@ -0,0 +1,73 @@
+# --------------------------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# --------------------------------------------------------------------------------------------
+import copy
+from docutils import nodes
+from sphinx import addnodes
+from sphinx.directives import ObjectDescription
+from docutils.parsers.rst import Directive
+from sphinx.util.docfields import Field
+
+cli_field_types = [
+    Field('summary', label='Summary', has_arg=False,
+          names=('summary', 'shortdesc')),
+    Field('description', label='Description', has_arg=False,
+          names=('description', 'desc', 'longdesc'))
+]
+
+class CliBaseDirective(ObjectDescription):
+    def handle_signature(self, sig, signode):
+        signode += addnodes.desc_addname(sig, sig)
+        return sig
+
+    def needs_arglist(self):
+        return False
+
+    def add_target_and_index(self, name, sig, signode):
+        signode['ids'].append(name)
+
+    def get_index_text(self, modname, name):
+        return name
+
+# Note: each 'names' value must be a tuple; a bare ('deprecated') is just a
+# parenthesized string, which docutils would iterate character by character.
+class CliGroupDirective(CliBaseDirective):
+    doc_field_types = copy.copy(cli_field_types)
+    doc_field_types.extend([
+        Field('docsource', label='Doc Source', has_arg=False,
+              names=('docsource', 'documentsource')),
+        Field('deprecated', label='Deprecated', has_arg=False,
+              names=('deprecated',))
+    ])
+
+class CliCommandDirective(CliBaseDirective):
+    doc_field_types = copy.copy(cli_field_types)
+    doc_field_types.extend([
+        Field('docsource', label='Doc Source', has_arg=False,
+              names=('docsource', 'documentsource')),
+        Field('deprecated', label='Deprecated', has_arg=False,
+              names=('deprecated',))
+    ])
+
+class CliArgumentDirective(CliBaseDirective):
+    doc_field_types = copy.copy(cli_field_types)
+    doc_field_types.extend([
+        Field('required', label='Required', has_arg=False,
+              names=('required',)),
+        Field('values', label='Allowed values', has_arg=False,
+              names=('values', 'choices', 'options')),
+        Field('default', label='Default value', has_arg=False,
+              names=('default',)),
+        Field('source', label='Values from', has_arg=False,
+              names=('source', 'sources')),
+        Field('deprecated', label='Deprecated', has_arg=False,
+              names=('deprecated',))
+    ])
+
+class CliExampleDirective(CliBaseDirective):
+    pass
+
+def setup(app):
+    app.add_directive('cligroup', CliGroupDirective)
+    app.add_directive('clicommand', CliCommandDirective)
+    app.add_directive('cliarg', CliArgumentDirective)
+    app.add_directive('cliexample', CliExampleDirective)
diff --git a/src/scripts/refdoc/conf.py b/src/scripts/refdoc/conf.py
new file mode 100644
index 00000000000..dd4c41d7d8f
--- /dev/null
+++ b/src/scripts/refdoc/conf.py
@@ -0,0 +1,42 @@
+#!/usr/bin/env python3
+# --------------------------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# -------------------------------------------------------------------------------------------- +# -*- coding: utf-8 -*- + +import os +import sys +sys.path.insert(0, os.getcwd()) + +# For more information on all config options, see http://www.sphinx-doc.org/en/stable/config.html + +extensions = [ + 'sphinx.ext.doctest', + 'sphinx.ext.coverage', + 'sphinx.ext.ifconfig', + 'sphinx.ext.viewcode', + 'sphinx.ext.autodoc', + 'cligroup.cligroup', + 'azhelpgen.azhelpgen' +] + +# The file name extension for the sphinx source files. +source_suffix = '.rst' +# The master toctree document. +master_doc = 'ind' + +# General information about the project. +project = 'az' +copyright = '2018, msft' +author = 'msft' + +# The language for content autogenerated by Sphinx +language = None + +# List of patterns, relative to source directory, that match files and +# directories to ignore when looking for source files. +exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store'] + +# Disable smartquotes to keep the document just the same as that in _help.py +smartquotes = False diff --git a/src/scripts/refdoc/generate.py b/src/scripts/refdoc/generate.py new file mode 100644 index 00000000000..ae5778ae142 --- /dev/null +++ b/src/scripts/refdoc/generate.py @@ -0,0 +1,80 @@ +# -------------------------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# -------------------------------------------------------------------------------------------- + +from __future__ import print_function + +import os +import sys +import copy +import shutil +import argparse +import tempfile +import datetime +from subprocess import check_call, CalledProcessError + + +ENV_KEY_AZURE_EXTENSION_DIR = 'AZURE_EXTENSION_DIR' + +def print_status(msg=''): + print('-- '+msg) + + +def generate(ext_file, output_dir): + # Verify sphinx installed in environment before we get started + check_call(['sphinx-build', '--version']) + if not output_dir: + output_dir = tempfile.mkdtemp(prefix='ref-doc-out-', dir=os.getcwd()) + print_status('Using output directory {}'.format(output_dir)) + temp_extension_dir = tempfile.mkdtemp() + try: + pip_cmd = [sys.executable, '-m', 'pip', 'install', '--target', os.path.join(temp_extension_dir, 'extension'), + ext_file, '--disable-pip-version-check', '--no-cache-dir'] + print_status('Executing "{}"'.format(' '.join(pip_cmd))) + check_call(pip_cmd) + sphinx_cmd = ['sphinx-build', '-b', 'xml', os.path.dirname(os.path.realpath(__file__)), output_dir] + env = copy.copy(os.environ) + env[ENV_KEY_AZURE_EXTENSION_DIR] = temp_extension_dir + print_status('Executing "{}" with {} set to {}'.format(' '.join(sphinx_cmd), + ENV_KEY_AZURE_EXTENSION_DIR, + env['AZURE_EXTENSION_DIR'])) + check_call(sphinx_cmd, env=env) + finally: + shutil.rmtree(temp_extension_dir) + print_status('Cleaned up temp directory {}'.format(temp_extension_dir)) + print_status('Ref doc output available at {}'.format(output_dir)) + print_status('Done.') + + +def _type_ext_file(val): + ext_file = os.path.realpath(os.path.expanduser(val)) + if os.path.isdir(ext_file): + raise argparse.ArgumentTypeError('{} is a directory not an extension file.'.format(ext_file)) + if not os.path.isfile(ext_file): + raise argparse.ArgumentTypeError('{} does not exist.'.format(ext_file)) + if os.path.splitext(ext_file)[1] != '.whl': + raise argparse.ArgumentTypeError('{} Extension files should end with 
.whl'.format(ext_file)) + return ext_file + + +def _type_path(val): + out_path = os.path.realpath(os.path.expanduser(val)) + if not os.path.isdir(out_path): + raise argparse.ArgumentTypeError('{} is not a directory. Create it or specify different directory.'.format(out_path)) + if os.listdir(out_path): + raise argparse.ArgumentTypeError('{} is not empty. Empty output directory required.'.format(out_path)) + return out_path + + +# A small command line interface for the script +if __name__ == '__main__': + parser = argparse.ArgumentParser(description='Script to generate reference documentation for a single Azure CLI extension.') + + parser.add_argument('-e', '--extension-file', dest='ext_file', + help='Path to the extension .whl file.', required=True, type=_type_ext_file) + parser.add_argument('-o', '--output-dir', dest='output_dir', + help='Path to place the generated documentation. By default, a temporary directory will be created.', required=False, type=_type_path) + + args = parser.parse_args() + generate(args.ext_file, args.output_dir) diff --git a/src/scripts/refdoc/ind.rst b/src/scripts/refdoc/ind.rst new file mode 100644 index 00000000000..5fd5f988675 --- /dev/null +++ b/src/scripts/refdoc/ind.rst @@ -0,0 +1,2 @@ +.. azhelpgen:: + \ No newline at end of file diff --git a/src/scripts/run_az.py b/src/scripts/run_az.py new file mode 100644 index 00000000000..9575640951c --- /dev/null +++ b/src/scripts/run_az.py @@ -0,0 +1,10 @@ +# -------------------------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# -------------------------------------------------------------------------------------------- + +import sys + +from azure.cli import __main__ as cli_main + +sys.exit(cli_main(sys.argv)) From 86d020d71e02bac5dc5a35dfa93f4d5fadc73f4a Mon Sep 17 00:00:00 2001 From: Jay Aluru Date: Sun, 3 Nov 2024 17:01:12 -0800 Subject: [PATCH 2/2] deleting unwanted files --- src/scripts/automation/build_package.py | 34 -- src/scripts/ci/avail-ext-doc/README.md | 3 - src/scripts/ci/avail-ext-doc/list-template.md | 28 -- src/scripts/ci/avail-ext-doc/requirements.txt | 3 - .../ci/avail-ext-doc/update_extension_list.py | 81 ---- src/scripts/ci/azdev_linter_style.py | 293 -------------- src/scripts/ci/breaking_change_test.py | 208 ---------- src/scripts/ci/build_ext_cmd_tree.sh | 43 -- src/scripts/ci/codegen_cal.py | 96 ----- .../ci/credscan/CredScanSuppressions.json | 299 -------------- src/scripts/ci/index_ref_doc.py | 84 ---- src/scripts/ci/release_version_cal.py | 378 ------------------ src/scripts/ci/service_name.py | 127 ------ src/scripts/ci/sync_extensions.py | 208 ---------- src/scripts/ci/sync_extensions.sh | 8 - src/scripts/ci/test_index.py | 217 ---------- src/scripts/ci/test_index_ref_doc.sh | 13 - src/scripts/ci/test_init.py | 65 --- src/scripts/ci/test_source.py | 107 ----- src/scripts/ci/update_ext_cmd_tree.py | 112 ------ src/scripts/ci/update_index.py | 68 ---- src/scripts/ci/util.py | 165 -------- src/scripts/ci/verify_codeowners.py | 42 -- src/scripts/refdoc/README.md | 26 -- src/scripts/refdoc/azhelpgen/__init__.py | 4 - src/scripts/refdoc/azhelpgen/azhelpgen.py | 190 --------- src/scripts/refdoc/cligroup/__init__.py | 4 - src/scripts/refdoc/cligroup/cligroup.py | 73 ---- src/scripts/refdoc/conf.py | 42 -- src/scripts/refdoc/generate.py | 80 ---- src/scripts/refdoc/ind.rst | 2 - src/scripts/run_az.py | 10 - 32 files 
changed, 3113 deletions(-) delete mode 100644 src/scripts/automation/build_package.py delete mode 100644 src/scripts/ci/avail-ext-doc/README.md delete mode 100644 src/scripts/ci/avail-ext-doc/list-template.md delete mode 100644 src/scripts/ci/avail-ext-doc/requirements.txt delete mode 100644 src/scripts/ci/avail-ext-doc/update_extension_list.py delete mode 100644 src/scripts/ci/azdev_linter_style.py delete mode 100644 src/scripts/ci/breaking_change_test.py delete mode 100644 src/scripts/ci/build_ext_cmd_tree.sh delete mode 100644 src/scripts/ci/codegen_cal.py delete mode 100644 src/scripts/ci/credscan/CredScanSuppressions.json delete mode 100644 src/scripts/ci/index_ref_doc.py delete mode 100644 src/scripts/ci/release_version_cal.py delete mode 100644 src/scripts/ci/service_name.py delete mode 100644 src/scripts/ci/sync_extensions.py delete mode 100644 src/scripts/ci/sync_extensions.sh delete mode 100644 src/scripts/ci/test_index.py delete mode 100644 src/scripts/ci/test_index_ref_doc.sh delete mode 100644 src/scripts/ci/test_init.py delete mode 100644 src/scripts/ci/test_source.py delete mode 100644 src/scripts/ci/update_ext_cmd_tree.py delete mode 100644 src/scripts/ci/update_index.py delete mode 100644 src/scripts/ci/util.py delete mode 100644 src/scripts/ci/verify_codeowners.py delete mode 100644 src/scripts/refdoc/README.md delete mode 100644 src/scripts/refdoc/azhelpgen/__init__.py delete mode 100644 src/scripts/refdoc/azhelpgen/azhelpgen.py delete mode 100644 src/scripts/refdoc/cligroup/__init__.py delete mode 100644 src/scripts/refdoc/cligroup/cligroup.py delete mode 100644 src/scripts/refdoc/conf.py delete mode 100644 src/scripts/refdoc/generate.py delete mode 100644 src/scripts/refdoc/ind.rst delete mode 100644 src/scripts/run_az.py diff --git a/src/scripts/automation/build_package.py b/src/scripts/automation/build_package.py deleted file mode 100644 index 7e72d0c7360..00000000000 --- a/src/scripts/automation/build_package.py +++ /dev/null @@ -1,34 +0,0 @@ -#!/usr/bin/env python - -#------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -#-------------------------------------------------------------------------- - -import argparse -import os -import glob -from subprocess import check_call - -DEFAULT_DEST_FOLDER = "./dist" - -def create_package(name, dest_folder=DEFAULT_DEST_FOLDER): - # a package will exist in either one, or the other folder. this is why we can resolve both at the same time. - absdirs = [os.path.dirname(package) for package in (glob.glob('{}/setup.py'.format(name)) + glob.glob('sdk/*/{}/setup.py'.format(name)))] - absdirpath = os.path.abspath(absdirs[0]) - check_call(['python', 'setup.py', 'bdist_wheel', '-d', dest_folder], cwd=absdirpath) - check_call(['python', 'setup.py', "sdist", "--format", "zip", '-d', dest_folder], cwd=absdirpath) - -if __name__ == '__main__': - """ - This file is used for Swagger CLI extension automation to build the wheel file and zip file - """ - parser = argparse.ArgumentParser(description='Build Azure package.') - parser.add_argument('name', help='The package name') - parser.add_argument('--dest', '-d', default=DEFAULT_DEST_FOLDER, - help='Destination folder. Relative to the package dir. 
[default: %(default)s]') - args = parser.parse_args() - create_package(args.name, args.dest) - - diff --git a/src/scripts/ci/avail-ext-doc/README.md b/src/scripts/ci/avail-ext-doc/README.md deleted file mode 100644 index 34066002d02..00000000000 --- a/src/scripts/ci/avail-ext-doc/README.md +++ /dev/null @@ -1,3 +0,0 @@ -This scripts is used in a Pipeline named **Azure CLI Extensions Sync** of Azure DevOps. - -It's for syncing available extensions list to Microsoft/azure-cli-docs. diff --git a/src/scripts/ci/avail-ext-doc/list-template.md b/src/scripts/ci/avail-ext-doc/list-template.md deleted file mode 100644 index 2738e5d4576..00000000000 --- a/src/scripts/ci/avail-ext-doc/list-template.md +++ /dev/null @@ -1,28 +0,0 @@ ---- -title: List of available Azure CLI extensions | Microsoft Docs -description: A complete list of officially supported Azure Command-Line Interface (CLI) extensions that are provided and maintained by Microsoft. -author: haroldrandom -ms.author: jianzen -manager: yonzhan,yungezz -ms.date: {{ date }} -ms.topic: article -ms.service: azure-cli -ms.devlang: azure-cli -ms.tool: azure-cli -ms.custom: devx-track-azurecli -keywords: az extension, azure cli extensions, azure extensions ---- - -# Available Azure CLI extensions - -This article is a complete list of the available extensions for the Azure CLI which are supported by Microsoft. The list of extensions is also available from the CLI. To get it, run [az extension list-available](/cli/azure/extension#az-extension-list-available): - -```azurecli-interactive -az extension list-available --output table -``` - -You will be prompted to install an extension on first use. - -| Extension | Required Minimum CLI Version | Description | Status | Release Notes | -|----|-----------------|-------------|---------|---------------|{% for extension in extensions %} -|[{{ extension.name }}]({{ extension.project_url }}) | {{ extension.min_cli_core_version }} | {{ extension.desc }} | {{ extension.status }} | [{{extension.version}}]({{extension.history}}) |{% endfor %} diff --git a/src/scripts/ci/avail-ext-doc/requirements.txt b/src/scripts/ci/avail-ext-doc/requirements.txt deleted file mode 100644 index e722f2a852b..00000000000 --- a/src/scripts/ci/avail-ext-doc/requirements.txt +++ /dev/null @@ -1,3 +0,0 @@ -Jinja2==3.0.3 -requests -wheel==0.31.1 diff --git a/src/scripts/ci/avail-ext-doc/update_extension_list.py b/src/scripts/ci/avail-ext-doc/update_extension_list.py deleted file mode 100644 index 5eeb3701e63..00000000000 --- a/src/scripts/ci/avail-ext-doc/update_extension_list.py +++ /dev/null @@ -1,81 +0,0 @@ - -# -------------------------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# -------------------------------------------------------------------------------------------- - -""" -This script must be run at the root of repo folder, which is azure-cli-extensions/ -It's used to update a file "azure-cli-extensions-list.md" of MicrosoftDocs/azure-cli-docs. -The file content is list of all available latest extensions. -""" - -import os -import sys - -import collections -import datetime -from pkg_resources import parse_version - -from jinja2 import Template # pylint: disable=import-error -import requests - -# After migration to OneBranch, clone azure-cli-extensions repo and azure-docs-cli repo are required. 
-# Also standardizes the directory structure: -# - $(System.DefaultWorkingDirectory) -# - azure-cli-extensions -# - azure-docs-cli -AZURE_CLI_EXTENSIONS_REPO_PATH = os.path.abspath(os.path.join('.', 'azure-cli-extensions')) -AZURE_DOCS_CLI_REPO_PATH = os.path.abspath(os.path.join('.', 'azure-docs-cli')) -AVAILABLE_EXTENSIONS_DOC = os.path.join(AZURE_DOCS_CLI_REPO_PATH, 'docs-ref-conceptual', 'azure-cli-extensions-list.md') -TEMPLATE_FILE = os.path.join(AZURE_CLI_EXTENSIONS_REPO_PATH, 'scripts', 'ci', 'avail-ext-doc', 'list-template.md') - -sys.path.insert(0, os.path.join(AZURE_CLI_EXTENSIONS_REPO_PATH, 'scripts')) -from ci.util import get_index_data, INDEX_PATH - - -def get_extensions(): - extensions = [] - index_extensions = collections.OrderedDict(sorted(get_index_data()['extensions'].items())) - for _, exts in index_extensions.items(): - # Get latest version - exts = sorted(exts, key=lambda c: parse_version(c['metadata']['version']), reverse=True) - - # some extension modules may not include 'HISTORY.rst' - project_url = exts[0]['metadata']['extensions']['python.details']['project_urls']['Home'] - history_tmp = project_url + '/HISTORY.rst' - history = project_url if str(requests.get(history_tmp).status_code) == '404' else history_tmp - if exts[0]['metadata'].get('azext.isPreview'): - status = 'Preview' - elif exts[0]['metadata'].get('azext.isExperimental'): - status = 'Experimental' - else: - status = 'GA' - - extensions.append({ - 'name': exts[0]['metadata']['name'], - 'desc': exts[0]['metadata']['summary'], - 'min_cli_core_version': exts[0]['metadata']['azext.minCliCoreVersion'], - 'version': exts[0]['metadata']['version'], - 'project_url': project_url, - 'history': history, - 'status': status - }) - return extensions - - -def update_extensions_list(output_file): - with open(TEMPLATE_FILE, 'r') as doc_template: - template = Template(doc_template.read()) - if template is None: - raise RuntimeError("Failed to read template file {}".format(TEMPLATE_FILE)) - with open(output_file, 'w') as output: - output.write(template.render(extensions=get_extensions(), date=datetime.date.today().strftime("%m/%d/%Y"))) - - -def main(): - update_extensions_list(AVAILABLE_EXTENSIONS_DOC) - - -if __name__ == '__main__': - main() diff --git a/src/scripts/ci/azdev_linter_style.py b/src/scripts/ci/azdev_linter_style.py deleted file mode 100644 index 6b08712ec2d..00000000000 --- a/src/scripts/ci/azdev_linter_style.py +++ /dev/null @@ -1,293 +0,0 @@ -# -------------------------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# -------------------------------------------------------------------------------------------- - -""" -This script is used to run azdev linter and azdev style on extensions. - -It's only working on ADO by default. 
If want to run locally, -please update the target branch/commit to find diff in function find_modified_files_against_master_branch() -""" -import json -import logging -import os -import re -import shutil -from subprocess import CalledProcessError, check_call, check_output - -import service_name -from pkg_resources import parse_version -from util import get_ext_metadata - -logger = logging.getLogger(__name__) -logger.setLevel(logging.DEBUG) -ch = logging.StreamHandler() -ch.setLevel(logging.DEBUG) -logger.addHandler(ch) - -def separator_line(): - logger.info('-' * 100) - - -class ModifiedFilesNotAllowedError(Exception): - """ - Exception raise for the scenario that modified files is conflict against publish requirement. - Scenario 1: if modified files contain only src/index.json, don't raise - Scenario 2: if modified files contain not only extension code but also src/index.json, raise. - Scenario 3: if modified files don't contain src/index.json, don't raise. - """ - - def __str__(self): - msg = """ - --------------------------------------------------------------------------------------------------------- - You have modified both source code and src/index.json! - - There is a release pipeline will help you to build, upload and publish your extension. - Once your PR is merged into master branch, a new PR will be created to update src/index.json automatically. - - If you want us to help to build, upload and publish your extension, src/index.json must not be modified. - --------------------------------------------------------------------------------------------------------- - """ - return msg - - -class AzExtensionHelper: - def __init__(self, extension_name): - self.extension_name = extension_name - - @staticmethod - def _cmd(cmd): - logger.info(cmd) - check_call(cmd, shell=True) - - def add_from_url(self, url): - self._cmd('az extension add -s {} -y'.format(url)) - - def remove(self): - self._cmd('az extension remove -n {}'.format(self.extension_name)) - - -class AzdevExtensionHelper: - def __init__(self, extension_name): - self.extension_name = extension_name - - @staticmethod - def _cmd(cmd): - logger.info(cmd) - check_call(cmd, shell=True) - - def add_from_code(self): - self._cmd('azdev extension add {}'.format(self.extension_name)) - - def remove(self): - self._cmd('azdev extension remove {}'.format(self.extension_name)) - - def linter(self): - self._cmd('azdev linter --include-whl-extensions {}'.format(self.extension_name)) - - def style(self): - self._cmd('azdev style {}'.format(self.extension_name)) - - def build(self): - self._cmd('azdev extension build {}'.format(self.extension_name)) - - def check_extension_name(self): - extension_root_dir_name = self.extension_name - original_cwd = os.getcwd() - dist_dir = os.path.join(original_cwd, 'dist') - files = os.listdir(dist_dir) - logger.info(f"wheel files in the dist directory: {files}") - for f in files: - if f.endswith('.whl'): - NAME_REGEX = r'(.*)-\d+.\d+.\d+' - extension_name = re.findall(NAME_REGEX, f)[0] - extension_name = extension_name.replace('_', '-') - logger.info(f"extension name is: {extension_name}") - ext_file = os.path.join(dist_dir, f) - break - metadata = get_ext_metadata(dist_dir, ext_file, extension_name) - pretty_metadata = json.dumps(metadata, indent=2) - logger.info(f"metadata in the wheel file is: {pretty_metadata}") - shutil.rmtree(dist_dir) - if '_' in extension_root_dir_name: - raise ValueError(f"Underscores `_` are not allowed in the extension root directory, " - f"please change it to a hyphen `-`.") - if 
metadata['name'] != extension_name: - raise ValueError(f"The name {metadata['name']} in setup.py " - f"is not the same as the extension name {extension_name}! \n" - f"Please fix the name in setup.py!") - - -def find_modified_files_against_master_branch(): - """ - Find modified files from src/ only. - A: Added, C: Copied, M: Modified, R: Renamed, T: File type changed. - Deleted files don't count in diff. - """ - ado_pr_target_branch = 'origin/' + os.environ.get('ADO_PULL_REQUEST_TARGET_BRANCH') - - separator_line() - logger.info('pull request target branch: %s', ado_pr_target_branch) - - cmd = 'git --no-pager diff --name-only --diff-filter=ACMRT {} -- src/'.format(ado_pr_target_branch) - files = check_output(cmd.split()).decode('utf-8').split('\n') - files = [f for f in files if len(f) > 0] - - if files: - logger.info('modified files:') - separator_line() - for f in files: - logger.info(f) - - return files - - -def contain_index_json(files): - return 'src/index.json' in files - - -def contain_extension_code(files): - with open('src/index.json', 'r') as fd: - current_extensions = json.loads(fd.read()).get("extensions") - - current_extension_homes = set('src/{}'.format(name) for name in current_extensions) - - for file in files: - if any([file.startswith(prefix) for prefix in current_extension_homes]): - return True - - # for new added extensions - for file in files: - if 'src/' in file and os.path.isfile(file) and os.path.isdir(os.path.dirname(file)): - new_extension_home = os.path.dirname(file) - - if os.path.isfile(os.path.join(new_extension_home, 'setup.py')): - return True - - return False - - -def azdev_on_external_extension(index_json, azdev_type): - """ - Check if the modified metadata items in index.json refer to the extension in repo. - If not, az extension check on wheel. Otherwise skip it. 
- """ - - public_extensions = json.loads(check_output('az extension list-available -d', shell=True)) - - with open(index_json, 'r') as fd: - current_extensions = json.loads(fd.read()).get("extensions") - - for name in current_extensions: - modified_entries = [entry for entry in current_extensions[name] if entry not in public_extensions.get(name, [])] - - if not modified_entries: - continue - - # check if source code exists, if so, skip - if os.path.isdir('src/{}'.format(name)): - continue - - separator_line() - - latest_entry = max(modified_entries, key=lambda c: parse_version(c['metadata']['version'])) - - az_extension = AzExtensionHelper(name) - az_extension.add_from_url(latest_entry['downloadUrl']) - - azdev_extension = AzdevExtensionHelper(name) - if azdev_type in ['all', 'linter']: - azdev_extension.linter() - # TODO: - # azdev style support external extension - # azdev test support external extension - # azdev_extension.style() - - logger.info('Checking service name for external extensions') - service_name.check() - - az_extension.remove() - - -def azdev_on_internal_extension(modified_files, azdev_type): - extension_names = set() - - for f in modified_files: - src, name, *_ = f.split('/') - if os.path.isdir(os.path.join(src, name)): - extension_names.add(name) - - if not extension_names: - separator_line() - logger.info('no extension source code modified, no extension needs to be checked') - - for name in extension_names: - separator_line() - - azdev_extension = AzdevExtensionHelper(name) - azdev_extension.add_from_code() - if azdev_type in ['all', 'linter']: - azdev_extension.linter() - azdev_extension.build() - azdev_extension.check_extension_name() - if azdev_type in ['all', 'style']: - try: - azdev_extension.style() - except CalledProcessError as e: - statement_msg = """ - ------------------- Please note ------------------- - This task does not block the PR merge. - And it is recommended if you want to create a separate PR to fix these style issues. - CLI will modify it to force block PR merge on 2025. - ---------------------- Thanks ---------------------- - """ - logger.error(statement_msg) - exit(1) - - logger.info('Checking service name for internal extensions') - service_name.check() - - azdev_extension.remove() - - -def main(): - import argparse - parser = argparse.ArgumentParser(description='azdev linter and azdev style on modified extensions') - parser.add_argument('--type', - type=str, - help='Control whether azdev linter, azdev style, azdev test needs to be run. ' - 'Supported values: linter, style, test, all, all is the default.', default='all') - args = parser.parse_args() - azdev_type = args.type - logger.info('azdev type: %s', azdev_type) - modified_files = find_modified_files_against_master_branch() - - if len(modified_files) == 1 and contain_index_json(modified_files): - # Scenario 1. - # This scenarios is for modify index.json only. - # If the modified metadata items refer to the extension code exits in this repo, PR is be created via Pipeline. - # If the modified metadata items refer to the extension code doesn't exist, PR is created from Service Team. - # We try to run azdev linter and azdev style on it. - azdev_on_external_extension(modified_files[0], azdev_type) - else: - # modified files contain more than one file - - if contain_extension_code(modified_files): - # Scenario 2, we reject. 
- if contain_index_json(modified_files): - raise ModifiedFilesNotAllowedError() - - azdev_on_internal_extension(modified_files, azdev_type) - else: - separator_line() - logger.info('no extension source code modified, no extension needs to be checked') - separator_line() - - -if __name__ == '__main__': - try: - main() - except ModifiedFilesNotAllowedError as e: - logger.error(e) - exit(1) diff --git a/src/scripts/ci/breaking_change_test.py b/src/scripts/ci/breaking_change_test.py deleted file mode 100644 index 027fb6d3399..00000000000 --- a/src/scripts/ci/breaking_change_test.py +++ /dev/null @@ -1,208 +0,0 @@ -#!/usr/bin/env python - -# -------------------------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# -------------------------------------------------------------------------------------------- - -import json -import logging -import os -import subprocess -import sys - -from util import diff_code - -from azdev.utilities.path import get_cli_repo_path, get_ext_repo_paths -from subprocess import run - -logger = logging.getLogger(__name__) -logger.setLevel(logging.DEBUG) -ch = logging.StreamHandler() -ch.setLevel(logging.DEBUG) -logger.addHandler(ch) - -pull_request_number = os.environ.get('PULL_REQUEST_NUMBER', None) -job_name = os.environ.get('JOB_NAME', None) -azdev_test_result_dir = os.path.expanduser("~/.azdev/env_config/mnt/vss/_work/1/s/env") -src_branch = os.environ.get('PR_TARGET_BRANCH', None) -target_branch = 'merged_pr' -base_meta_path = '~/_work/1/base_meta' -diff_meta_path = '~/_work/1/diff_meta' -output_path = '~/_work/1/output_meta' - - -def install_extensions(diif_ref, branch): - for tname, ext_path in diif_ref: - ext_name = ext_path.split('/')[-1] - logger.info(f'installing extension: {ext_name}') - cmd = ['azdev', 'extension', 'add', ext_name] - logger.info(f'cmd: {cmd}') - out = run(cmd, capture_output=True, text=True) - if out.returncode and branch == 'base' and 'ERROR: extension(s) not found' in out.stderr: - print(f"{cmd} failed, extesion {ext_name} is not exist on base branch, skip it.") - sys.exit(0) - elif out.returncode: - raise RuntimeError(f"{cmd} failed") - - -def uninstall_extensions(diif_ref): - for tname, ext_path in diif_ref: - ext_name = ext_path.split('/')[-1] - logger.info(f'uninstalling extension: {ext_name}') - cmd = ['azdev', 'extension', 'remove', ext_name] - logger.info(f'cmd: {cmd}') - out = run(cmd) - if out.returncode: - raise RuntimeError(f"{cmd} failed") - - -def get_diff_meta_files(diff_ref): - cmd = ['git', 'checkout', '-b', target_branch] - print(cmd) - subprocess.run(cmd) - cmd = ['git', 'checkout', src_branch] - print(cmd) - subprocess.run(cmd) - cmd = ['git', 'checkout', target_branch] - print(cmd) - subprocess.run(cmd) - cmd = ['git', 'rev-parse', 'HEAD'] - print(cmd) - subprocess.run(cmd) - install_extensions(diff_ref, branch='target') - cmd = ['azdev', 'command-change', 'meta-export', '--src', src_branch, '--tgt', target_branch, '--repo', get_ext_repo_paths()[0], '--meta-output-path', diff_meta_path] - print(cmd) - subprocess.run(cmd) - cmd = ['ls', '-al', diff_meta_path] - print(cmd) - subprocess.run(cmd) - uninstall_extensions(diff_ref) - - -def get_base_meta_files(diff_ref): - cmd = ['git', 'checkout', src_branch] - print(cmd) - subprocess.run(cmd) - cmd = ['git', 'rev-parse', 'HEAD'] - print(cmd) - subprocess.run(cmd) - install_extensions(diff_ref, 
branch='base') - cmd = ['azdev', 'command-change', 'meta-export', 'EXT', '--meta-output-path', base_meta_path] - print(cmd) - subprocess.run(cmd) - cmd = ['ls', '-al', base_meta_path] - print(cmd) - subprocess.run(cmd) - - -def meta_diff(only_break=False): - if os.path.exists(diff_meta_path): - for file in os.listdir(diff_meta_path): - if file.endswith('.json'): - cmd = ['azdev', 'command-change', 'meta-diff', '--base-meta-file', os.path.join(base_meta_path, file), '--diff-meta-file', os.path.join(diff_meta_path, file), '--output-file', os.path.join(output_path, file)] - if only_break: - cmd.append('--only-break') - print(cmd) - subprocess.run(cmd) - cmd = ['ls', '-al', output_path] - print(cmd) - subprocess.run(cmd) - - -def get_pipeline_result(only_break=False): - pipeline_result = { - "breaking_change_test": { - "Details": [ - { - "TestName": "Azure CLI Extensions Breaking Change Test", - "Details": [] - } - ] - } - } - if pull_request_number != '$(System.PullRequest.PullRequestNumber)': - pipeline_result['pull_request_number'] = pull_request_number - if os.path.exists(output_path): - for file in os.listdir(output_path): - # skip empty file - if not os.path.getsize(os.path.join(output_path, file)): - continue - with open(os.path.join(output_path, file), 'r') as f: - items = json.load(f) - module = os.path.basename(file).split('.')[0].split('_')[1] - breaking_change = { - "Module": module, - "Status": "", - "Content": "" - } - status = 'Warning' - sorted_items = sorted(items, key=sort_by_content) - for item in sorted_items: - if item['is_break']: - status = 'Failed' - breaking_change['Content'] = build_markdown_content(item, breaking_change['Content']) - breaking_change['Status'] = status - pipeline_result['breaking_change_test']['Details'][0]['Details'].append(breaking_change) - if not pipeline_result['breaking_change_test']['Details'][0]['Details']: - pipeline_result['breaking_change_test']['Details'][0]['Details'].append({ - "Module": "Non Breaking Changes", - "Status": "Succeeded", - "Content": "" - }) - - result_length = len(json.dumps(pipeline_result, indent=4)) - if result_length > 65535: - if only_break: - logger.error("Breaking change report exceeds 65535 characters even with only_break=True.") - return pipeline_result - - logger.info("Regenerating breaking change report with only_break=True to control length within 65535.") - meta_diff(only_break=True) - pipeline_result = get_pipeline_result(only_break=True) - return pipeline_result - - return pipeline_result - - -def sort_by_content(item): - # Sort item by is_break, cmd_name and rule_message, - is_break = 0 if item['is_break'] else 1 - cmd_name = item['cmd_name'] if 'cmd_name' in item else item['subgroup_name'] - return is_break, cmd_name, item['rule_message'] - - -def build_markdown_content(item, content): - if content == "": - content = f'|rule|cmd_name|rule_message|suggest_message|\n|---|---|---|---|\n' - rule_link = f'[{item["rule_id"]} - {item["rule_name"]}]({item["rule_link_url"]})' - rule = f'❌ {rule_link} ' if item['is_break'] else f'⚠️ {rule_link}' - cmd_name = item['cmd_name'] if 'cmd_name' in item else item['subgroup_name'] - rule_message = item['rule_message'] - suggest_message = item['suggest_message'] - content += f'|{rule}|{cmd_name}|{rule_message}|{suggest_message}|\n' - return content - - -def save_pipeline_result(pipeline_result): - # save pipeline result to file - # /mnt/vss/.azdev/env_config/mnt/vss/_work/1/s/env/breaking_change_test.json - filename = os.path.join(azdev_test_result_dir, 
f'breaking_change_test.json') - with open(filename, 'w') as f: - json.dump(pipeline_result, f, indent=4) - logger.info(f"save pipeline result to file: {filename}") - - -def main(): - if pull_request_number != '$(System.PullRequest.PullRequestNumber)': - logger.info("Start breaking change test ...\n") - diff_ref = diff_code(src_branch, 'HEAD') - get_diff_meta_files(diff_ref) - get_base_meta_files(diff_ref) - meta_diff() - pipeline_result = get_pipeline_result() - save_pipeline_result(pipeline_result) - - -if __name__ == '__main__': - main() diff --git a/src/scripts/ci/build_ext_cmd_tree.sh b/src/scripts/ci/build_ext_cmd_tree.sh deleted file mode 100644 index 53bed033b85..00000000000 --- a/src/scripts/ci/build_ext_cmd_tree.sh +++ /dev/null @@ -1,43 +0,0 @@ -#!/usr/bin/env bash - -changed_content=$(git --no-pager diff --diff-filter=ACMRT HEAD~$AZURE_EXTENSION_COMMIT_NUM -- src/index.json) -if [[ -z "$changed_content" ]]; then - echo "index.json not modified. End task." - exit 0 -fi - -pip install azure-cli-core azure-cli requests -pip install azure-storage-blob==1.5.0 -echo "Listing Available Extensions:" -az extension list-available -otable - -# turn off telemetry as it crowds output -export AZURE_CORE_COLLECT_TELEMETRY=False - -# use index.json in master branch -export AZURE_EXTENSION_INDEX_URL=https://raw.githubusercontent.com/Azure/azure-cli-extensions/master/src/index.json - -output=$(az extension list-available --query [].name -otsv) -# azure-cli-ml is replaced by ml -# disable alias which relies on Jinja2 2.10 -blocklist=("azure-cli-ml" "alias") - -rm -f ~/.azure/extCmdTreeToUpload.json - -filter_exts="" -for ext in $output; do - ext=${ext%$'\r'} # Remove a trailing newline when running on Windows. - if [[ " ${blocklist[@]} " =~ " ${ext} " ]]; then - continue - fi - filter_exts="${filter_exts} ${ext}" - echo "Adding extension:" $ext - az extension add --upgrade -n $ext - if [ $? != 0 ] - then - echo "Failed to load:" $ext - exit 1 - fi -done - -python $(cd $(dirname $0); pwd)/update_ext_cmd_tree.py $filter_exts diff --git a/src/scripts/ci/codegen_cal.py b/src/scripts/ci/codegen_cal.py deleted file mode 100644 index 92129befa6e..00000000000 --- a/src/scripts/ci/codegen_cal.py +++ /dev/null @@ -1,96 +0,0 @@ -#!/usr/bin/env python - -# -------------------------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# -------------------------------------------------------------------------------------------- -# pylint: disable=line-too-long -import os -import re -import argparse -from util import get_repo_root - -output_file = os.environ.get('output_file', None) -result_path = os.environ.get('result_path', None) - -cli_ext_path = get_repo_root() -cli_ext_src_path = os.path.join(cli_ext_path, "src") -print("cli_ext_path: ", cli_ext_path) -print("cli_ext_src_path: ", cli_ext_src_path) - -DEFAULT_SURVEY_MESSAGE = "Thank you for using our CodeGen tool. We value your feedback, and we would like to know how we can improve our product. 
Please take a few minutes to fill our [codegen survey](https://forms.office.com/r/j6rQuFUqUf?origin=lprLink) " - -def check_is_module_aaz_related(mod): - codegen_aaz_folder_pattern = re.compile(r"src/%s/azext_.*?/aaz/" % mod) - module_path = os.path.join(cli_ext_src_path, mod) - print("module_path: ", module_path) - for root, subdir, files in os.walk(module_path): - codegen_aaz_match = re.findall(codegen_aaz_folder_pattern, root) - if codegen_aaz_match: - print(codegen_aaz_match) - return True - return False - -def save_comment_pr_survey(comment_pr_survey): - print("check comment_pr_survey: ", comment_pr_survey) - with open(os.path.join(cli_ext_path, result_path, output_file), "w") as f: - f.write(str(comment_pr_survey) + "\n") - -def check_aaz_module(): - comment_pr_survey = 0 - changed_module_list = os.environ.get('changed_module_list', "").split() - for mod in changed_module_list: - if check_is_module_aaz_related(mod): - comment_pr_survey = 1 - break - save_comment_pr_survey(comment_pr_survey) - if comment_pr_survey == 1: - comment_message = [] - add_survey_hint_message(comment_message) - save_comment_message(comment_message) - -def add_survey_hint_message(comment_message): - comment_message.append("## CodeGen Tools Feedback Collection") - comment_message.append(DEFAULT_SURVEY_MESSAGE) - -def save_comment_message(comment_message): - print("comment_message:") - print(comment_message) - survey_comment_file = os.environ.get('survey_comment_file', "") - with open(os.path.join(cli_ext_path, result_path, survey_comment_file), "w") as f: - for line in comment_message: - f.write(line + "\n") - -def save_gh_output(): - with open(os.environ['GITHUB_OUTPUT'], 'a') as fh: - print(f'CommentAAZSurvey={comment_pr_survey}', file=fh) - -def set_aaz_comment(): - if not os.path.exists(os.path.join(cli_ext_path, result_path, output_file)): - print("error in file dowdload") - return - comment_pr_survey = 0 - with open(os.path.join(cli_ext_path, result_path, output_file), "r") as f: - for line in f: - comment_pr_survey = int(line.strip()) - print("comment_pr_survey: ", comment_pr_survey) - save_gh_output() - if comment_pr_survey: - comment_message = [] - add_survey_hint_message(comment_message) - save_comment_message(comment_message) - -def main(job): - if job == "check": - check_aaz_module() - elif job == "set": - set_aaz_comment() - else: - print("unsupported job type") - -if __name__ == '__main__': - parser = argparse.ArgumentParser() - parser.add_argument("--job", choices=["check", "set"], required=True, help="job type") - args = parser.parse_args() - print(vars(args)) - main(args.job) \ No newline at end of file diff --git a/src/scripts/ci/credscan/CredScanSuppressions.json b/src/scripts/ci/credscan/CredScanSuppressions.json deleted file mode 100644 index 3b7a8439c27..00000000000 --- a/src/scripts/ci/credscan/CredScanSuppressions.json +++ /dev/null @@ -1,299 +0,0 @@ -{ - "tool": "Credential Scanner", - "suppressions": [ - { - "placeholder": "ManangementGroupServicePrincipal%40123", - "_justification": "[ManagementGroups] hard code dummy password" - }, - { - "file": [ - "src\\eventgrid\\azext_eventgrid\\tests\\latest\\recordings\\test_create_domain.yaml", - "src\\eventgrid\\azext_eventgrid\\tests\\latest\\recordings\\test_create_topic.yaml" - ], - "_justification": "false alarm about 'Found General Symmetric Key'" - }, - { - "file": [ - "src\\mixed-reality\\azext_mixed_reality\\tests\\latest\\recordings\\test_spatial_anchors_account_scenario.yaml" - ], - "_justification": "[MixedReality] Found Azure 
Shared Access Key / Web Hook Token" - }, - { - "placeholder": "aduser", - "_justification": "[NetAppFiles] Add suppression for false alarm in comments of _help.py" - }, - { - "placeholder": "AZURE_CLIENT_SECRET", - "_justification": "[db_up] false alarm about environment variable name" - }, - { - "placeholder": "ADPassword", - "_justification": "[SQL] false alarm about AuthenticationType enum value" - }, - { - "placeholder": "ActiveDirectoryPassword", - "_justification": "[DataMigration] false alarm about AuthenticationType enum value" - }, - { - "placeholder": "Ovg+o0K/0/2V8upg7AwlyAPCriEcOSXKuBu2Gv/PU70Y7aWDW3C2ZRmw6kYWqPWBaM1GosLkcSZkgsobAlT+Sw==", - "_justification": "[ADLS] false alarm on sign value" - }, - { - "placeholder": "4CTlhouPm0c3PWuTQ8t6Myh/FYegVUPqXUmdtL2byRytFPlt98L/pw==", - "_justification": "verification code in test_eventgrid_commands.py" - }, - { - "placeholder": "7jTiaEBVeYjC8X6gPDUhIhAnFRjaxZaGyS3hBbr09bmj3heQNhvrbA==", - "_justification": "verification code in test_eventgrid_commands.py" - }, - { - "placeholder": "Password123!", - "_justification": "dummy password in test_synapse_scenario.py" - }, - { - "file": [ - "src\\timeseriesinsights\\azext_timeseriesinsights\\tests\\latest\\recordings\\test_timeseriesinsights_environment_longterm.yaml", - "src\\timeseriesinsights\\azext_timeseriesinsights\\tests\\latest\\recordings\\test_timeseriesinsights_event_source_eventhub.yaml", - "src\\timeseriesinsights\\azext_timeseriesinsights\\tests\\latest\\recordings\\test_timeseriesinsights_event_source_iothub.yaml" - ], - "_justification": "Azure storgae access key" - }, - { - "file": [ - "src\\maintenance\\azext_maintenance\\tests\\latest\\recordings\\test_maintenance_commands.yaml", - "src\\maintenance\\azext_maintenance\\tests\\latest\\recordings\\test_maintenance_configuration_create.yaml", - "src\\maintenance\\azext_maintenance\\tests\\latest\\recordings\\test_signalr_commands.yaml", - "src\\notification-hub\\azext_notification_hub\\tests\\latest\\recordings\\test_notificationhubs.yaml" - ], - "_justification": "Azure Shared Access Key / Web Hook Token" - }, - { - "file": [ - "src\\eventgrid\\azext_eventgrid\\tests\\latest\\recordings\\test_Partner_scenarios.yaml" - ], - "_justification": "Found General Symmetric Key" - }, - { - "file": [ - "src\\datafactory\\azext_datafactory\\vendored_sdks\\datafactory\\models\\_data_factory_management_client_enums.py", - "src\\datafactory\\azext_datafactory\\vendored_sdks\\datafactory\\models\\_models_py3.py" - ], - "_justification": "Dummy secrets for one-off resources" - }, - { - "file": [ - "src\\communication\\azext_communication\\tests\\latest\\recordings\\test_communication_scenario.yaml" - ], - "_justification": "Dummy resources' tokens left during testing Micorosfot.Communication" - }, - { - "file": [ - "src\\application-insights\\azext_applicationinsights\\tests\\latest\\recordings\\test_api_key.yaml" - ], - "_justification": "random keys for one-off resources" - }, - { - "file": [ - "src\\stream-analytics\\azext_stream_analytics\\_help.py", - "src\\appservice-kube\\azext_appservice_kube\\_help.py" - ], - "_justification": "dummy passwords for one-off resources" - }, - { - "file": [ - "src\\storage-blob-preview\\azext_storage_blob_preview\\tests\\latest\\recordings\\test_storage_blob_incremental_copy.yaml" - ], - "_justification": "[Storage] response body contains random value recognized as secret in outdated recoding files of storage may remove in the future" - }, - { - "file": [ - 
"src\\image-gallery\\azext_image_gallery\\vendored_sdks\\azure_mgmt_compute\\models\\_models.py", - "src\\image-gallery\\azext_image_gallery\\vendored_sdks\\azure_mgmt_compute\\models\\_models_py3.py" - ], - "_justification": "Python SDK uses the example of fake password to indicate the format" - }, - { - "file": [ - "src\\aks-preview\\azext_aks_preview\\tests\\latest\\data\\setup_proxy.sh" - ], - "_justification": "Dummy self-signed certificate + private key used for testing only." - }, - { - "file": [ - "src\\containerapp\\azext_containerapp\\tests\\latest\\recordings\\test_container_acr.yaml", - "src\\containerapp\\azext_containerapp\\tests\\latest\\recordings\\test_containerapp_e2e.yaml", - "src\\containerapp\\azext_containerapp\\tests\\latest\\recordings\\test_containerapp_env_dapr_components.yaml", - "src\\containerapp\\azext_containerapp\\tests\\latest\\recordings\\test_containerapp_env_e2e.yaml", - "src\\containerapp\\azext_containerapp\\tests\\latest\\recordings\\test_containerapp_env_storage.yaml", - "src\\containerapp\\azext_containerapp\\tests\\latest\\recordings\\test_containerapp_identity_system.yaml", - "src\\containerapp\\azext_containerapp\\tests\\latest\\recordings\\test_containerapp_ingress_e2e.yaml", - "src\\containerapp\\azext_containerapp\\tests\\latest\\recordings\\test_containerapp_ingress_traffic_e2e.yaml", - "src\\containerapp\\azext_containerapp\\tests\\latest\\recordings\\test_containerapp_logstream.yaml", - "src\\containerapp\\azext_containerapp\\tests\\latest\\recordings\\test_containerapp_update.yaml", - "src\\containerapp\\azext_containerapp\\tests\\latest\\recordings\\test_containerapp_dapr_e2e.yaml", - "src\\containerapp\\azext_containerapp\\tests\\latest\\recordings\\test_containerapp_up_image_e2e.yaml", - "src\\containerapp\\azext_containerapp\\tests\\latest\\recordings\\test_containerapp_custom_domains_e2e.yaml", - "src\\containerapp\\azext_containerapp\\tests\\latest\\recordings\\test_containerapp_revision_label_e2e.yaml", - "src\\containerapp\\azext_containerapp\\tests\\latest\\cert.pfx", - "src\\containerapp\\azext_containerapp\\tests\\latest\\test_containerapp_commands.py", - "src\\containerapp\\azext_containerapp\\tests\\latest\\test_containerapp_env_commands.py", - "src\\containerapp\\azext_containerapp\\tests\\latest\\recordings\\test_containerapp_registry_msi.yaml", - "src\\containerapp\\azext_containerapp\\tests\\latest\\recordings\\test_containerapp_update_containers.yaml", - "src\\containerapp\\azext_containerapp\\tests\\latest\\recordings\\test_containerapp_anonymous_registry.yaml", - "src\\containerapp\\azext_containerapp\\tests\\latest\\recordings\\test_containerapp_identity_user.yaml", - "src\\containerapp\\azext_containerapp\\tests\\latest\\recordings\\test_containerapp_registry_identity_user.yaml", - "src\\containerapp\\azext_containerapp\\tests\\latest\\recordings\\test_containerapp_identity_e2e.yaml", - "src\\containerapp\\azext_containerapp\\tests\\latest\recordings\\test_containerapp_scale_create.yaml", - "src\\containerapp\\azext_containerapp\\tests\\latest\\recordings\\test_containerapp_compose_create_basic_no_existing_resources.yaml", - "src\\containerapp\\azext_containerapp\\tests\\latest\\recordings\\test_containerapp_compose_create_with_environment.yaml", - "src\\containerapp\\azext_containerapp\\tests\\latest\\recordings\\test_containerapp_compose_create_with_environment_prompt.yaml", - "src\\containerapp\\azext_containerapp\\tests\\latest\\recordings\\test_containerapp_compose_create_with_ingress_both.yaml", - 
"src\\containerapp\\azext_containerapp\\tests\\latest\\recordings\\test_containerapp_compose_create_with_ingress_external.yaml", - "src\\containerapp\\azext_containerapp\\tests\\latest\\recordings\\test_containerapp_compose_create_with_ingress_internal.yaml", - "src\\containerapp\\azext_containerapp\\tests\\latest\\recordings\\test_containerapp_compose_create_with_ingress_prompt.yaml", - "src\\containerapp\\azext_containerapp\\tests\\latest\\recordings\\test_containerapp_compose_create_with_registry_all_args.yaml", - "src\\containerapp\\azext_containerapp\\tests\\latest\\recordings\\test_containerapp_compose_create_with_registry_server_arg_only.yaml", - "src\\containerapp\\azext_containerapp\\tests\\latest\\recordings\\test_containerapp_compose_create_with_replicas_global_scale.yaml", - "src\\containerapp\\azext_containerapp\\tests\\latest\\recordings\\test_containerapp_compose_create_with_replicas_replicated_mode.yaml", - "src\\containerapp\\azext_containerapp\\tests\\latest\\recordings\\test_containerapp_compose_create_with_resources_from_both_cpus_and_deploy_cpu.yaml", - "src\\containerapp\\azext_containerapp\\tests\\latest\\recordings\\test_containerapp_compose_create_with_resources_from_deploy_cpu.yaml", - "src\\containerapp\\azext_containerapp\\tests\\latest\\recordings\\test_containerapp_compose_create_with_resources_from_service_cpus.yaml", - "src\\containerapp\\azext_containerapp\\tests\\latest\\recordings\\test_containerapp_compose_create_with_secrets.yaml", - "src\\containerapp\\azext_containerapp\\tests\\latest\\recordings\\test_containerapp_compose_create_with_secrets_and_existing_environment.yaml", - "src\\containerapp\\azext_containerapp\\tests\\latest\\recordings\\test_containerapp_compose_create_with_secrets_and_existing_environment_conflict.yaml", - "src\\containerapp\\azext_containerapp\\tests\\latest\\recordings\\test_containerapp_compose_create_with_transport_arg.yaml", - "src\\containerapp\\azext_containerapp\\tests\\latest\\recordings\\test_containerapp_compose_with_command_list.yaml", - "src\\containerapp\\azext_containerapp\\tests\\latest\\recordings\\test_containerapp_compose_with_command_list_and_entrypoint.yaml", - "src\\containerapp\\azext_containerapp\\tests\\latest\\recordings\\test_containerapp_compose_with_command_string.yaml", - "src\\containerapp\\azext_containerapp\\tests\\latest\\recordings\\test_containerapp_compose_create_with_transport_mapping_arg.yaml", - "src\\containerapp\\azext_containerapp\\tests\\latest\\recordings\\test_containerapp_session_code_interpreter_nodelts_registry_e2e.yaml" - ], - "_justification": "Dummy resources' keys left during testing Microsoft.App (required for log-analytics to create managedEnvironments)" - }, - { - "file":[ - "src\\diskpool\\README.md", - "src\\datamigration\\README.md" - ], - "_justification": "README file example password" - }, - { - "file":[ - "src\\aks-preview\\azext_aks_preview\\_help.py" - ], - "_justification": "False positive detection, reported credentital not found." 
-        },
-        {
-            "file":[
-                "src\\containerapp\\azext_containerapp\\tests\\latest\\test_containerapp_connected_env_scenario.py",
-                "src\\cosmosdb-preview\\azext_cosmosdb_preview\\tests\\latest\\test_cosmosdb_mongocluster_scenario.py",
-                "src\\devcenter\\azext_devcenter\\tests\\latest\\helper.py",
-                "src\\devcenter\\azext_devcenter\\tests\\latest\\test_devcenter_scenario.py",
-                "src\\image-gallery\\azext_image_gallery\\tests\\latest\\test_image_gallery.py",
-                "src\\scvmm\\azext_scvmm\\tests\\latest\\test_scvmm_scenario.py",
-                "src\\vm-repair\\azext_vm_repair\\tests\\latest\\test_repair_commands.py"
-            ],
-            "_justification": "Fake password for testing."
-        },
-        {
-            "file":[
-                "src\\kusto\\azext_kusto\\tests\\latest\\example_steps.py"
-            ],
-            "_justification": "Fake token for testing."
-        },
-        {
-            "file":[
-                "src\\palo-alto-networks\\azext_palo_alto_networks\\tests\\latest\\test_palo_alto_networks.py",
-                "src\\workloads\\azext_workloads\\tests\\latest\\create_infra_distributed_non_ha_config.json",
-                "src\\workloads\\azext_workloads\\tests\\latest\\InstallPayload.json"
-            ],
-            "_justification": "Fake key for testing."
-        },
-        {
-            "file":[
-                "src\\aks-preview\\azext_aks_preview\\tests\\latest\\recordings\\test_aks_update_with_windows_password.yaml",
-                "src\\application-insights\\azext_applicationinsights\\tests\\latest\\recordings\\test_connect_webapp.yaml",
-                "src\\application-insights\\azext_applicationinsights\\tests\\latest\\recordings\\test_connect_webapp_cross_resource_group.yaml",
-                "src\\appservice-kube\\azext_appservice_kube\\tests\\latest\\recordings\\test_linux_webapp_quick_create_kube.yaml",
-                "src\\appservice-kube\\azext_appservice_kube\\tests\\latest\\recordings\\test_webapp_elastic_scale_min_elastic_instance_count_kube.yaml",
-                "src\\appservice-kube\\azext_appservice_kube\\tests\\latest\\recordings\\test_webapp_elastic_scale_prewarmed_instance_count_kube.yaml",
-                "src\\appservice-kube\\azext_appservice_kube\\tests\\latest\\recordings\\test_win_webapp_quick_create_runtime_kube.yaml",
-                "src\\authV2\\azext_authV2\\tests\\latest\\recordings\\test_authV2_auth.yaml",
-                "src\\authV2\\azext_authV2\\tests\\latest\\recordings\\test_authV2_authclassic.yaml",
-                "src\\containerapp\\azext_containerapp\\tests\\latest\\recordings\\test_containerappjob_create_with_environment_id.yaml",
-                "src\\containerapp\\azext_containerapp\\tests\\latest\\recordings\\test_containerappjob_create_with_yaml.yaml",
-                "src\\containerapp\\azext_containerapp\\tests\\latest\\recordings\\test_containerapp_compose_create_environment_to_target_location.yaml",
-                "src\\containerapp\\azext_containerapp\\tests\\latest\\recordings\\test_containerapp_create_and_update_with_env_vars_e2e.yaml",
-                "src\\containerapp\\azext_containerapp\\tests\\latest\\recordings\\test_containerapp_create_with_vnet_yaml.yaml",
-                "src\\containerapp\\azext_containerapp\\tests\\latest\\recordings\\test_containerapp_dev_add_on_binding_e2e.yaml",
-                "src\\containerapp\\azext_containerapp\\tests\\latest\\recordings\\test_containerapp_env_certificate_e2e.yaml",
-                "src\\containerapp\\azext_containerapp\\tests\\latest\\recordings\\test_containerapp_env_certificate_upload_with_certificate_name.yaml",
-                "src\\containerapp\\azext_containerapp\\tests\\latest\\recordings\\test_containerapp_env_custom_domains.yaml",
-                "src\\containerapp\\azext_containerapp\\tests\\latest\\recordings\\test_containerapp_env_internal_only_e2e.yaml",
-                "src\\containerapp\\azext_containerapp\\tests\\latest\\recordings\\test_containerapp_env_la_dynamic_json.yaml",
-                "src\\containerapp\\azext_containerapp\\tests\\latest\\recordings\\test_containerapp_env_logs_e2e.yaml",
-                "src\\containerapp\\azext_containerapp\\tests\\latest\\recordings\\test_containerapp_env_msi_certificate.yaml",
-                "src\\containerapp\\azext_containerapp\\tests\\latest\\recordings\\test_containerapp_env_msi_custom_domains.yaml",
-                "src\\containerapp\\azext_containerapp\\tests\\latest\\recordings\\test_containerapp_env_mtls.yaml",
-                "src\\containerapp\\azext_containerapp\\tests\\latest\\recordings\\test_containerapp_env_p2p_traffic_encryption.yaml",
-                "src\\containerapp\\azext_containerapp\\tests\\latest\\recordings\\test_containerapp_env_update_custom_domains.yaml",
-                "src\\containerapp\\azext_containerapp\\tests\\latest\\recordings\\test_containerapp_env_usages.yaml",
-                "src\\containerapp\\azext_containerapp\\tests\\latest\\recordings\\test_containerapp_get_customdomainverificationid_e2e.yaml",
-                "src\\containerapp\\azext_containerapp\\tests\\latest\\recordings\\test_containerapp_java_component.yaml",
-                "src\\containerapp\\azext_containerapp\\tests\\latest\\recordings\\test_containerapp_java_component_deprecated.yaml",
-                "src\\containerapp\\azext_containerapp\\tests\\latest\\recordings\\test_containerapp_managed_service_binding_e2e.yaml",
-                "src\\containerapp\\azext_containerapp\\tests\\latest\\recordings\\test_containerapp_patch_list_and_apply_with_node18_e2e.yaml",
-                "src\\containerapp\\azext_containerapp\\tests\\latest\\recordings\\test_containerapp_patch_list_and_apply_with_python310_e2e.yaml",
-                "src\\containerapp\\azext_containerapp\\tests\\latest\\recordings\\test_containerapp_patch_list_and_apply_with_show_all_e2e.yaml",
-                "src\\containerapp\\azext_containerapp\\tests\\latest\\recordings\\test_containerapp_preview_connected_env_certificate.yaml",
-                "src\\containerapp\\azext_containerapp\\tests\\latest\\recordings\\test_containerapp_preview_connected_env_certificate_upload_with_certificate_name.yaml",
-                "src\\containerapp\\azext_containerapp\\tests\\latest\\recordings\\test_containerapp_preview_create_with_environment_id.yaml",
-                "src\\containerapp\\azext_containerapp\\tests\\latest\\recordings\\test_containerapp_resiliency.yaml",
-                "src\\containerapp\\azext_containerapp\\tests\\latest\\recordings\\test_containerapp_sessionpool.yaml",
-                "src\\containerapp\\azext_containerapp\\tests\\latest\\recordings\\test_containerapp_sessionpool_registry.yaml",
-                "src\\containerapp\\azext_containerapp\\tests\\latest\\recordings\\test_containerapp_session_code_interpreter_e2e.yaml",
-                "src\\containerapp\\azext_containerapp\\tests\\latest\\recordings\\test_containerapp_session_code_interpreter_nodelts_e2e.yaml",
-                "src\\containerapp\\azext_containerapp\\tests\\latest\\recordings\\test_containerapp_up_mooncake.yaml",
-                "src\\containerapp\\azext_containerapp\\tests\\latest\\recordings\\test_containerapp_up_source_with_default_registry_image.yaml",
-                "src\\containerapp\\azext_containerapp\\tests\\latest\\recordings\\test_container_app_mount_azurefile_e2e.yaml",
-                "src\\containerapp\\azext_containerapp\\tests\\latest\\recordings\\test_container_app_mount_nfsazurefile_e2e.yaml",
-                "src\\containerapp\\azext_containerapp\\tests\\latest\\recordings\\test_container_app_mount_secret_e2e.yaml",
-                "src\\containerapp\\azext_containerapp\\tests\\latest\\recordings\\test_container_app_mount_secret_update_e2e.yaml",
-                "src\\containerapp\\azext_containerapp\\tests\\latest\\recordings\\test_dapr_component_resiliency.yaml",
-                "src\\containerapp\\azext_containerapp\\tests\\latest\\recordings\\test_containerapp_up_source_with_dockerfile_e2e.yaml",
-                "src\\containerapp\\azext_containerapp\\tests\\latest\\recordings\\test_containerapp_up_source_with_multiple_environments_e2e.yaml",
-                "src\\cosmosdb-preview\\azext_cosmosdb_preview\\tests\\latest\\recordings\\test_cosmosdb_collection.yaml",
-                "src\\cosmosdb-preview\\azext_cosmosdb_preview\\tests\\latest\\recordings\\test_cosmosdb_database.yaml",
-                "src\\cosmosdb-preview\\azext_cosmosdb_preview\\tests\\latest\\recordings\\test_cosmosdb_mongocluster_crud.yaml",
-                "src\\cosmosdb-preview\\azext_cosmosdb_preview\\tests\\latest\\recordings\\test_cosmosdb_mongocluster_firewall.yaml",
-                "src\\elastic\\azext_elastic\\tests\\latest\\recordings\\test_elastic_monitor.yaml",
-                "src\\image-gallery\\azext_image_gallery\\tests\\latest\\recordings\\test_community_gallery_operations.yaml",
-                "src\\image-gallery\\azext_image_gallery\\tests\\latest\\recordings\\test_create_vm_with_community_gallery_image.yaml",
-                "src\\image-gallery\\azext_image_gallery\\tests\\latest\\recordings\\test_shared_gallery_community.yaml",
-                "src\\kusto\\azext_kusto\\tests\\latest\\recordings\\test_kusto_Scenario.yaml",
-                "src\\palo-alto-networks\\azext_palo_alto_networks\\tests\\latest\\recordings\\test_palo_alto_firewall_v2.yaml",
-                "src\\purview\\azext_purview\\tests\\latest\\recordings\\test_purview_account.yaml",
-                "src\\quantum\\azext_quantum\\tests\\latest\\recordings\\test_workspace_keys.yaml",
-                "src\\qumulo\\azext_qumulo\\tests\\latest\\recordings\\test_file_system.yaml",
-                "src\\rdbms-connect\\azext_rdbms_connect\\tests\\latest\\recordings\\test_mysql_flexible_server_connect.yaml",
-                "src\\rdbms-connect\\azext_rdbms_connect\\tests\\latest\\recordings\\test_postgres_flexible_server_connect.yaml",
-                "src\\redisenterprise\\azext_redisenterprise\\tests\\latest\\recordings\\test_redisenterprise_scenario1.yaml",
-                "src\\redisenterprise\\azext_redisenterprise\\tests\\latest\\recordings\\test_redisenterprise_scenario2.yaml",
-                "src\\scvmm\\azext_scvmm\\tests\\latest\\recordings\\test_scvmm.yaml",
-                "src\\spring\\azext_spring\\tests\\latest\\recordings\\test_api_portal.yaml",
-                "src\\spring\\azext_spring\\tests\\latest\\recordings\\test_app_crud.yaml",
-                "src\\spring\\azext_spring\\tests\\latest\\recordings\\test_app_crud_1.yaml",
-                "src\\spring\\azext_spring\\tests\\latest\\recordings\\test_app_deploy_container.yaml",
-                "src\\spring\\azext_spring\\tests\\latest\\recordings\\test_app_deploy_container_command.yaml",
-                "src\\spring\\azext_spring\\tests\\latest\\recordings\\test_blue_green_deployment.yaml",
-                "src\\staticwebapp\\azext_staticwebapp\\tests\\latest\\recordings\\test_staticwebapp_dbconnection_cosmosdb.yaml",
-                "src\\vmware\\azext_vmware\\tests\\latest\\recordings\\test_vmware_global_reach_connection.yaml",
-                "src\\workloads\\azext_workloads\\tests\\latest\\recordings\\test_workloads_svi.yaml",
-                "src\\workloads\\azext_workloads\\tests\\latest\\recordings\\test_workloads_svi_install.yaml",
-                "src\\oracle-database\\azext_oracle_database\\tests\\latest\\recordings\\test_oracledatabase_adbs.yaml",
-                "src\\storage-preview\\azext_storage_preview\\tests\\latest\\recordings\\test_storage_account_local_user.yaml"
-            ],
-            "_justification": "Fake credentials for recordings reported by new version credential scanner."
-        }
-    ]
-}
diff --git a/src/scripts/ci/index_ref_doc.py b/src/scripts/ci/index_ref_doc.py
deleted file mode 100644
index 63589122d34..00000000000
--- a/src/scripts/ci/index_ref_doc.py
+++ /dev/null
@@ -1,84 +0,0 @@
-#!/usr/bin/env python
-
-# --------------------------------------------------------------------------------------------
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License. See License.txt in the project root for license information.
-# --------------------------------------------------------------------------------------------
-
-from __future__ import print_function
-
-import os
-import sys
-import tempfile
-import traceback
-import unittest
-import shutil
-from subprocess import check_call, CalledProcessError
-from pkg_resources import parse_version, get_distribution
-
-from six import with_metaclass
-
-from util import get_index_data, get_whl_from_url, get_repo_root
-
-
-REF_GEN_SCRIPT = os.path.join(get_repo_root(), 'scripts', 'refdoc', 'generate.py')
-
-REF_DOC_OUT_DIR = os.environ.get('AZ_EXT_REF_DOC_OUT_DIR', tempfile.mkdtemp())
-
-if not os.path.isdir(REF_DOC_OUT_DIR):
-    print('{} is not a directory'.format(REF_DOC_OUT_DIR))
-    sys.exit(1)
-
-ALL_TESTS = []
-
-CLI_VERSION = get_distribution('azure-cli').version
-
-for extension_name, exts in get_index_data()['extensions'].items():
-    parsed_cli_version = parse_version(CLI_VERSION)
-    filtered_exts = []
-    for ext in exts:
-        if parsed_cli_version <= parse_version(ext['metadata'].get('azext.maxCliCoreVersion', CLI_VERSION)):
-            filtered_exts.append(ext)
-    if not filtered_exts:
-        continue
-
-    candidates_sorted = sorted(filtered_exts, key=lambda c: parse_version(c['metadata']['version']), reverse=True)
-    chosen = candidates_sorted[0]
-    ALL_TESTS.append((extension_name, chosen['downloadUrl'], chosen['filename']))
-
-
-class TestIndexRefDocsMeta(type):
-    def __new__(mcs, name, bases, _dict):
-
-        def gen_test(ext_name, ext_url, filename):
-            def test(self):
-                ext_file = get_whl_from_url(ext_url, filename, self.whl_dir)
-                ref_doc_out_dir = os.path.join(REF_DOC_OUT_DIR, ext_name)
-                if not os.path.isdir(ref_doc_out_dir):
-                    os.mkdir(ref_doc_out_dir)
-                script_args = [sys.executable, REF_GEN_SCRIPT, '--extension-file', ext_file, '--output-dir',
-                               ref_doc_out_dir]
-                try:
-                    check_call(script_args)
-                except CalledProcessError as e:
-                    traceback.print_exc()
-                    raise e
-            return test
-
-        for ext_name, ext_url, filename in ALL_TESTS:
-            test_name = "test_ref_doc_%s" % ext_name
-            _dict[test_name] = gen_test(ext_name, ext_url, filename)
-        return type.__new__(mcs, name, bases, _dict)
-
-
-class IndexRefDocs(with_metaclass(TestIndexRefDocsMeta, unittest.TestCase)):
-
-    def setUp(self):
-        self.whl_dir = tempfile.mkdtemp()
-
-    def tearDown(self):
-        shutil.rmtree(self.whl_dir)
-
-
-if __name__ == '__main__':
-    unittest.main()
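index_ref_doc.py leans on a metaclass so that every extension in the index becomes its own unittest method, and therefore its own pass/fail line in CI. The six.with_metaclass shim was only needed for Python 2 compatibility; a rough modern sketch of the same pattern, with stand-in data instead of the index:

import unittest

class PerExtensionTestsMeta(type):
    """Stamp one test_* method per item onto the class at creation time."""
    def __new__(mcs, name, bases, namespace):
        def gen_test(ext_name):
            def test(self):
                self.assertNotIn('_', ext_name)  # any per-extension check goes here
            return test
        for ext_name in ('aks-preview', 'firewall'):  # stand-ins for index entries
            namespace['test_ref_doc_%s' % ext_name] = gen_test(ext_name)
        return super().__new__(mcs, name, bases, namespace)

class PerExtensionTests(unittest.TestCase, metaclass=PerExtensionTestsMeta):
    pass

if __name__ == '__main__':
    unittest.main()  # reports each generated test separately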
diff --git a/src/scripts/ci/release_version_cal.py b/src/scripts/ci/release_version_cal.py
deleted file mode 100644
index da0047c1343..00000000000
--- a/src/scripts/ci/release_version_cal.py
+++ /dev/null
@@ -1,378 +0,0 @@
-#!/usr/bin/env python
-
-# --------------------------------------------------------------------------------------------
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License. See License.txt in the project root for license information.
-# --------------------------------------------------------------------------------------------
-# pylint: disable=line-too-long
-import os
-import re
-import json
-import subprocess
-from packaging.version import parse
-
-from azdev.utilities.path import get_cli_repo_path, get_ext_repo_paths
-from azdev.operations.extensions import cal_next_version
-from azdev.operations.constant import (PREVIEW_INIT_SUFFIX, VERSION_MAJOR_TAG, VERSION_MINOR_TAG,
-                                       VERSION_PATCH_TAG, VERSION_STABLE_TAG, VERSION_PREVIEW_TAG, VERSION_PRE_TAG)
-from util import get_index_data
-
-base_meta_path = os.environ.get('base_meta_path', None)
-diff_meta_path = os.environ.get('diff_meta_path', None)
-result_path = os.environ.get('result_path', None)
-output_file = os.environ.get('output_file', None)
-add_labels_file = os.environ.get('add_labels_file', None)
-remove_labels_file = os.environ.get('remove_labels_file', None)
-pr_user = os.environ.get('pr_user', "")
-
-changed_module_list = os.environ.get('changed_module_list', "").split()
-diff_code_file = os.environ.get('diff_code_file', "")
-print("diff_code_file:", diff_code_file)
-pr_label_list = os.environ.get('pr_label_list', "")
-pr_label_list = [name.lower().strip().strip('"').strip("'") for name in json.loads(pr_label_list)]
-
-DEFAULT_VERSION = "0.0.0"
-INIT_RELEASE_VERSION = "1.0.0b1"
-DEFAULT_MESSAGE = " - For more info about extension versioning, please refer to [Extension version schema](https://github.com/Azure/azure-cli/blob/release/doc/extensions/versioning_guidelines.md)"
-block_pr = 0
-
-cli_ext_path = get_ext_repo_paths()[0]
-print("get_cli_repo_path: ", get_cli_repo_path())
-print("get_ext_repo_paths: ", cli_ext_path)
-
-
-def extract_module_history_update_info(mod_update_info, mod):
-    """
-    re pattern:
-    --- a/src/monitor-control-service/HISTORY.(rst|md)
-    +++ b/src/monitor-control-service/HISTORY.(rst|md)
-    """
-    mod_update_info["history_updated"] = False
-    module_history_update_pattern = re.compile(r"\+\+\+.*?src/%s/HISTORY\.(rst|md)" % mod)
-    with open(diff_code_file, "r") as f:
-        for line in f:
-            mod_history_update_match = re.findall(module_history_update_pattern, line)
-            if mod_history_update_match:
-                mod_update_info["history_updated"] = True
-
-
-def extract_module_version_update_info(mod_update_info, mod):
-    """
-    re pattern:
-    --- a/src/monitor-control-service/setup.py
-    +++ b/src/monitor-control-service/setup.py
-    -VERSION = '1.0.1'
-    +VERSION = '1.1.1'
-    --- a/src/monitor-control-service/HISTORY.RST
-    py files exclude the tests, vendored_sdks and aaz folders
-    """
-    diff_file_started = False
-    module_setup_update_pattern = re.compile(r"\+\+\+.*?src/%s/(?!.*(?:tests|vendored_sdks|aaz)/).*?.py" % mod)
-    module_version_update_pattern = re.compile(r"\+\s?VERSION\s?\=\s?[\'\"]([0-9\.b]+)[\'\"]")
-    with open(diff_code_file, "r") as f:
-        for line in f:
-            if diff_file_started:
-                if mod_update_info.get("version_diff", None):
-                    break
-                if line.find("diff") == 0:
-                    diff_file_started = False
-                    continue
-                mod_version_update_match = re.findall(module_version_update_pattern, line)
-                if mod_version_update_match and len(mod_version_update_match) == 1:
-                    mod_update_info["version_diff"] = mod_version_update_match[0]
-            else:
-                mod_setup_update_match = re.findall(module_setup_update_pattern, line)
-                if mod_setup_update_match:
-                    diff_file_started = True
-
-
-def extract_module_metadata_update_info(mod_update_info, mod):
-    """
-    re pattern:
-    --- a/src/monitor-control-service/azext_amcs/azext_metadata.json
-    +++ b/src/monitor-control-service/azext_amcs/azext_metadata.json
-    -    "azext.isPreview": true
-    +    "azext.isPreview": true
-    --- a/src/monitor-control-service/HISTORY.RST
-    """
-    mod_update_info["meta_updated"] = False
-    module_meta_update_pattern = re.compile(r"\+\+\+.*?src/%s/azext_.*?/azext_metadata.json" % mod)
-    module_ispreview_add_pattern = re.compile(r"\+.*?azext.isPreview.*?true")
-    module_ispreview_remove_pattern = re.compile(r"\-.*?azext.isPreview.*?true")
-    module_isexp_add_pattern = re.compile(r"\+.*?azext.isExperimental.*?true")
-    module_isexp_remove_pattern = re.compile(r"\-.*?azext.isExperimental.*?true")
-    with open(diff_code_file, "r") as f:
-        for line in f:
-            if mod_update_info["meta_updated"]:
-                if line.find("---") == 0:
-                    break
-                ispreview_add_match = re.findall(module_ispreview_add_pattern, line)
-                if ispreview_add_match and len(ispreview_add_match):
-                    mod_update_info["preview_tag_diff"] = "add"
-                ispreview_remove_match = re.findall(module_ispreview_remove_pattern, line)
-                if ispreview_remove_match and len(ispreview_remove_match):
-                    mod_update_info["preview_tag_diff"] = "remove"
-                isexp_add_match = re.findall(module_isexp_add_pattern, line)
-                if isexp_add_match and len(isexp_add_match):
-                    mod_update_info["exp_tag_diff"] = "add"
-                isexp_remove_match = re.findall(module_isexp_remove_pattern, line)
-                if isexp_remove_match and len(isexp_remove_match):
-                    mod_update_info["exp_tag_diff"] = "remove"
-            else:
-                module_meta_update_match = re.findall(module_meta_update_pattern, line)
-                if module_meta_update_match:
-                    mod_update_info["meta_updated"] = True
-
-
-def find_module_metadata_of_latest_version(mod):
-    cmd = ["azdev", "extension", "show", "--mod-name", mod, "--query", "pkg_name", "-o", "tsv"]
-    result = subprocess.run(cmd, stdout=subprocess.PIPE)
-    if result.returncode == 0:
-        mod = result.stdout.decode("utf8").strip()
-    return get_module_metadata_of_max_version(mod)
-
-
-def extract_module_version_info(mod_update_info, mod):
-    next_version_pre_tag = get_next_version_pre_tag()
-    next_version_segment_tag = get_next_version_segment_tag()
-    print("next_version_pre_tag: ", next_version_pre_tag)
-    print("next_version_segment_tag: ", next_version_segment_tag)
-    base_meta_file = os.path.join(cli_ext_path, base_meta_path, "az_" + mod + "_meta.json")
-    diff_meta_file = os.path.join(cli_ext_path, diff_meta_path, "az_" + mod + "_meta.json")
-    if not os.path.exists(base_meta_file) and not os.path.exists(diff_meta_file):
-        print("no base and diff meta file found for {0}".format(mod))
-        return
-    elif not os.path.exists(base_meta_file) and os.path.exists(diff_meta_file):
-        print("no base meta file found for {0}".format(mod))
-        mod_update_info.update({"version": INIT_RELEASE_VERSION, "preview_tag": "add"})
-        return
-    elif not os.path.exists(diff_meta_file):
-        print("no diff meta file found for {0}".format(mod))
-        return
-    pre_release = find_module_metadata_of_latest_version(mod)
-    if pre_release is None:
-        next_version = cal_next_version(base_meta_file=base_meta_file, diff_meta_file=diff_meta_file,
-                                        current_version=DEFAULT_VERSION,
-                                        next_version_pre_tag=next_version_pre_tag,
-                                        next_version_segment_tag=next_version_segment_tag)
-    else:
-        next_version = cal_next_version(base_meta_file=base_meta_file, diff_meta_file=diff_meta_file,
-                                        current_version=pre_release['metadata']['version'],
-                                        is_preview=pre_release['metadata'].get("azext.isPreview", None),
-                                        is_experimental=pre_release['metadata'].get("azext.isExperimental", None),
-                                        next_version_pre_tag=next_version_pre_tag,
-                                        next_version_segment_tag=next_version_segment_tag)
-    mod_update_info.update(next_version)
-
-
-def fill_module_update_info(mods_update_info):
-    for mod in changed_module_list:
-        update_info = {}
-        extract_module_history_update_info(update_info, mod)
-        extract_module_version_update_info(update_info, mod)
-        extract_module_metadata_update_info(update_info, mod)
-        extract_module_version_info(update_info, mod)
-        mods_update_info[mod] = update_info
-    print("mods_update_info")
-    print(mods_update_info)
-
-
-def get_module_metadata_of_max_version(mod):
-    if mod not in get_index_data()['extensions']:
-        print("No previous release for {0}".format(mod))
-        return None
-    pre_releases = get_index_data()['extensions'][mod]
-    candidates_sorted = sorted(pre_releases, key=lambda c: parse(c['metadata']['version']), reverse=True)
-    chosen = candidates_sorted[0]
-    return chosen
-
-
-def get_next_version_pre_tag():
-    if VERSION_STABLE_TAG in pr_label_list:
-        return VERSION_STABLE_TAG
-    elif VERSION_PREVIEW_TAG in pr_label_list:
-        return VERSION_PREVIEW_TAG
-    else:
-        return None
-
-
-def get_next_version_segment_tag():
-    """
-    manual label order:
-    major > minor > patch > pre
-    """
-    if VERSION_MAJOR_TAG in pr_label_list:
-        return VERSION_MAJOR_TAG
-    elif VERSION_MINOR_TAG in pr_label_list:
-        return VERSION_MINOR_TAG
-    elif VERSION_PATCH_TAG in pr_label_list:
-        return VERSION_PATCH_TAG
-    elif VERSION_PRE_TAG in pr_label_list:
-        return VERSION_PRE_TAG
-    else:
-        return None
-
-
-def add_suggest_header(comment_message):
-    if block_pr == 1:
-        comment_message.insert(0, "## :warning: Release Requirements")
-    else:
-        comment_message.insert(0, "## Release Suggestions")
-    comment_message.insert(0, "Hi @" + pr_user)
-
-
-def gen_history_comment_message(mod, mod_update_info, mod_message):
-    if not mod_update_info["history_updated"]:
-        mod_message.append(" - Please log updates into `src/{0}/HISTORY.rst`".format(mod))
-
-
-def gen_version_comment_message(mod, mod_update_info, mod_message):
-    global block_pr
-    if not mod_update_info.get("version_diff", None):
-        if mod_update_info.get("version", None):
-            mod_message.append(" - Update `VERSION` to `{0}` in `src/{1}/setup.py`".format(mod_update_info.get("version", "-"), mod))
-    else:
-        if mod_update_info.get("version", None):
-            bot_version = parse(mod_update_info['version'])
-            if mod_update_info.get("version_diff", None):
-                diff_version = parse(mod_update_info['version_diff'])
-                if diff_version != bot_version:
-                    block_pr = 1
-                    mod_message.append(" - :warning: Please update `VERSION` to be `{0}` in `src/{1}/setup.py`".format(mod_update_info.get("version", "-"), mod))
-            else:
-                mod_message.append(" - Update `VERSION` to `{0}` in `src/{1}/setup.py`".format(mod_update_info.get("version", "-"), mod))
-
-
-def gen_preview_comment_message(mod, mod_update_info, mod_message):
-    global block_pr
-    if mod_update_info.get("preview_tag", "-") == mod_update_info.get("preview_tag_diff", "-"):
-        return
-    preview_comment_message = " - "
-    if mod_update_info.get("version_diff", None):
-        block_pr = 1
-        preview_comment_message += ":warning: "
-    if mod_update_info.get("preview_tag", None) and mod_update_info.get("preview_tag_diff", None):
-        if mod_update_info["preview_tag"] == "add" and mod_update_info["preview_tag_diff"] == "remove":
-            preview_comment_message += 'Set `azext.isPreview` to `true` in azext_metadata.json for {0}'.format(mod)
-        elif mod_update_info["preview_tag"] == "remove" and mod_update_info["preview_tag_diff"] == "add":
-            preview_comment_message += 'Remove `azext.isPreview: true` in azext_metadata.json for {0}'.format(mod)
-    elif not mod_update_info.get("preview_tag", None) and mod_update_info.get("preview_tag_diff", None):
-        if mod_update_info["preview_tag_diff"] == "add":
-            preview_comment_message += 'Remove `azext.isPreview: true` in azext_metadata.json for {0}'.format(mod)
-        elif mod_update_info["preview_tag_diff"] == "remove":
-            preview_comment_message += 'Set `azext.isPreview` to `true` in azext_metadata.json for {0}'.format(mod)
-    elif mod_update_info.get("preview_tag", None) and not mod_update_info.get("preview_tag_diff", None):
-        if mod_update_info["preview_tag"] == "add":
-            preview_comment_message += 'Set `azext.isPreview` to `true` in azext_metadata.json for {0}'.format(mod)
-        elif mod_update_info["preview_tag"] == "remove":
-            preview_comment_message += 'Remove `azext.isPreview: true` in azext_metadata.json for {0}'.format(mod)
-    mod_message.append(preview_comment_message)
-
-
-def gen_exp_comment_message(mod, mod_update_info, mod_message):
-    global block_pr
-    if mod_update_info.get("exp_tag", "-") == mod_update_info.get("exp_tag_diff", "-"):
-        return
-    exp_comment_message = " - "
-    if mod_update_info.get("version_diff", None):
-        block_pr = 1
-        exp_comment_message += ":warning: "
-    if mod_update_info.get("exp_tag", None) and mod_update_info.get("exp_tag_diff", None):
-        if mod_update_info["exp_tag"] == "remove" and mod_update_info["exp_tag_diff"] == "add":
-            exp_comment_message += 'Remove `azext.isExperimental: true` in azext_{0}/azext_metadata.json'.format(mod)
-        if mod_update_info["exp_tag"] == "add" and mod_update_info["exp_tag_diff"] == "remove":
-            exp_comment_message += 'Set `azext.isExperimental` to `true` in azext_metadata.json for {0}'.format(mod)
-    elif not mod_update_info.get("exp_tag", None) and mod_update_info.get("exp_tag_diff", None):
-        if mod_update_info["exp_tag_diff"] == "add":
-            exp_comment_message += 'Remove `azext.isExperimental: true` in azext_{0}/azext_metadata.json'.format(mod)
-        elif mod_update_info["exp_tag_diff"] == "remove":
-            exp_comment_message += 'Set `azext.isExperimental` to `true` in azext_metadata.json for {0}'.format(mod)
-    elif mod_update_info.get("exp_tag", None) and not mod_update_info.get("exp_tag_diff", None):
-        if mod_update_info["exp_tag"] == "add":
-            exp_comment_message += 'Set `azext.isExperimental` to `true` in azext_metadata.json for {0}'.format(mod)
-        elif mod_update_info["exp_tag"] == "remove":
-            exp_comment_message += 'Remove `azext.isExperimental: true` in azext_{0}/azext_metadata.json'.format(mod)
-    mod_message.append(exp_comment_message)
-
-
-def gen_comment_message(mod, mod_update_info, comment_message):
-    mod_message = []
-    gen_history_comment_message(mod, mod_update_info, mod_message)
-    gen_version_comment_message(mod, mod_update_info, mod_message)
-    gen_preview_comment_message(mod, mod_update_info, mod_message)
-    gen_exp_comment_message(mod, mod_update_info, mod_message)
-    if len(mod_message):
-        comment_message.append("### Module: {0}".format(mod))
-        comment_message += mod_message
" - # "If needed, please add `major`/`minor`/`patch`/`pre` label to adjust it.") - comment_message.append(DEFAULT_MESSAGE) - - -def save_comment_message(comment_message): - with open(result_path + "/" + output_file, "w") as f: - for line in comment_message: - f.write(line + "\n") - - -def save_label_output(): - with open(os.environ['GITHUB_OUTPUT'], 'a') as fh: - print(f'BlockPR={block_pr}', file=fh) - add_label_dict = { - "labels": ["release-version-block"] - } - removed_label = "release-version-block" - if block_pr == 0: - with open(result_path + "/" + remove_labels_file, "w") as f: - f.write(removed_label + "\n") - else: - # add block label and empty release label file - with open(result_path + "/" + add_labels_file, "w") as f: - json.dump(add_label_dict, f) - with open(result_path + "/" + remove_labels_file, "w") as f: - pass - - -def main(): - print("Start calculate release version ...\n") - print("base_meta_path: ", base_meta_path) - print("diff_meta_path: ", diff_meta_path) - print("output_file: ", output_file) - print("changed_module_list: ", changed_module_list) - print("pr_label_list: ", pr_label_list) - comment_message = [] - modules_update_info = {} - if len(changed_module_list) == 0: - comment_message.append(DEFAULT_MESSAGE) - save_comment_message(comment_message) - save_label_output() - return - fill_module_update_info(modules_update_info) - if len(modules_update_info) == 0: - comment_message.append(DEFAULT_MESSAGE) - save_comment_message(comment_message) - save_label_output() - return - for mod, update_info in modules_update_info.items(): - gen_comment_message(mod, update_info, comment_message) - if len(comment_message): - add_suggest_header(comment_message) - add_label_hint_message(comment_message) - else: - comment_message.append(DEFAULT_MESSAGE) - print("comment_message:") - print(comment_message) - print("block_pr:", block_pr) - save_comment_message(comment_message) - save_label_output() - - -if __name__ == '__main__': - main() \ No newline at end of file diff --git a/src/scripts/ci/service_name.py b/src/scripts/ci/service_name.py deleted file mode 100644 index a0b6e16e7b9..00000000000 --- a/src/scripts/ci/service_name.py +++ /dev/null @@ -1,127 +0,0 @@ -#!/usr/bin/env python - -# -------------------------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# -------------------------------------------------------------------------------------------- -""" -Check format of service_name.json. Command and AzureServiceName are required. Others are optional. -Each highest level command group should have reference in service_name.json. -""" -import json - -from azure.cli.core import MainCommandsLoader, AzCli -from azure.cli.core._help import AzCliHelp, CliCommandHelpFile -from azure.cli.core.commands import AzCliCommandInvoker, ExtensionCommandSource -from azure.cli.core.parser import AzCliCommandParser -from knack.help import GroupHelpFile - - -def get_extension_help_files(cli_ctx): - - # 1. Create invoker and load command table and arguments. Remember to turn off applicability check. 
diff --git a/src/scripts/ci/service_name.py b/src/scripts/ci/service_name.py
deleted file mode 100644
index a0b6e16e7b9..00000000000
--- a/src/scripts/ci/service_name.py
+++ /dev/null
@@ -1,127 +0,0 @@
-#!/usr/bin/env python
-
-# --------------------------------------------------------------------------------------------
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License. See License.txt in the project root for license information.
-# --------------------------------------------------------------------------------------------
-"""
-Check format of service_name.json. Command and AzureServiceName are required. Others are optional.
-Each highest-level command group should have a reference in service_name.json.
-"""
-import json
-
-from azure.cli.core import MainCommandsLoader, AzCli
-from azure.cli.core._help import AzCliHelp, CliCommandHelpFile
-from azure.cli.core.commands import AzCliCommandInvoker, ExtensionCommandSource
-from azure.cli.core.parser import AzCliCommandParser
-from knack.help import GroupHelpFile
-
-
-def get_extension_help_files(cli_ctx):
-
-    # 1. Create invoker and load command table and arguments. Remember to turn off applicability check.
-    invoker = cli_ctx.invocation_cls(cli_ctx=cli_ctx, commands_loader_cls=cli_ctx.commands_loader_cls,
-                                     parser_cls=cli_ctx.parser_cls, help_cls=cli_ctx.help_cls)
-    cli_ctx.invocation = invoker
-
-    invoker.commands_loader.skip_applicability = True
-    cmd_table = invoker.commands_loader.load_command_table(None)
-
-    # turn off applicability check for all loaders
-    for loaders in invoker.commands_loader.cmd_to_loader_map.values():
-        for loader in loaders:
-            loader.skip_applicability = True
-
-    # filter the command table to only get commands from extensions
-    cmd_table = {k: v for k, v in cmd_table.items() if isinstance(v.command_source, ExtensionCommandSource)}
-    invoker.commands_loader.command_table = cmd_table
-    print('FOUND {} command(s) from the extension.'.format(len(cmd_table)))
-
-    for cmd_name in cmd_table:
-        invoker.commands_loader.load_arguments(cmd_name)
-
-    invoker.parser.load_command_table(invoker.commands_loader)
-
-    # 2. Now load applicable help files
-    parser_keys = []
-    parser_values = []
-    sub_parser_keys = []
-    sub_parser_values = []
-    _store_parsers(invoker.parser, parser_keys, parser_values, sub_parser_keys, sub_parser_values)
-    for cmd, parser in zip(parser_keys, parser_values):
-        if cmd not in sub_parser_keys:
-            sub_parser_keys.append(cmd)
-            sub_parser_values.append(parser)
-    help_ctx = cli_ctx.help_cls(cli_ctx=cli_ctx)
-    help_files = []
-    for cmd, parser in zip(sub_parser_keys, sub_parser_values):
-        try:
-            help_file = GroupHelpFile(help_ctx, cmd, parser) if _is_group(parser) \
-                else CliCommandHelpFile(help_ctx, cmd, parser)
-            help_file.load(parser)
-            help_files.append(help_file)
-        except Exception as ex:
-            print("Skipped '{}' due to '{}'".format(cmd, ex))
-    help_files = sorted(help_files, key=lambda x: x.command)
-    return help_files
-
-
-def _store_parsers(parser, parser_keys, parser_values, sub_parser_keys, sub_parser_values):
-    for s in parser.subparsers.values():
-        parser_keys.append(_get_parser_name(s))
-        parser_values.append(s)
-        if _is_group(s):
-            for c in s.choices.values():
-                sub_parser_keys.append(_get_parser_name(c))
-                sub_parser_values.append(c)
-                _store_parsers(c, parser_keys, parser_values, sub_parser_keys, sub_parser_values)
-
-
-def _get_parser_name(s):
-    return (s._prog_prefix if hasattr(s, '_prog_prefix') else s.prog)[3:]
-
-
-def _is_group(parser):
-    return getattr(parser, '_subparsers', None) is not None \
-        or getattr(parser, 'choices', None) is not None
-
-
-def check():
-    az_cli = AzCli(cli_name='az',
-                   commands_loader_cls=MainCommandsLoader,
-                   invocation_cls=AzCliCommandInvoker,
-                   parser_cls=AzCliCommandParser,
-                   help_cls=AzCliHelp)
-    help_files = get_extension_help_files(az_cli)
-    # High command represents the left-most word in a command, e.g., vm, disk.
-    high_command_set = set()
-    for help_file in help_files:
-        if help_file.command:
-            high_command_set.add(help_file.command.split()[0])
-    print('high_command_set:')
-    print(high_command_set)
-
-    # Load and check service_name.json
-    with open('src/service_name.json') as f:
-        service_names = json.load(f)
-    print('Verifying src/service_name.json')
-    service_name_map = {}
-    for service_name in service_names:
-        command = service_name['Command']
-        service = service_name['AzureServiceName']
-        if not command.startswith('az '):
-            raise Exception('{} does not start with az!'.format(command))
-        if not service:
-            raise Exception('AzureServiceName of {} is empty!'.format(command))
-        service_name_map[command[3:]] = service
-    print('service_name_map:')
-    print(service_name_map)
-
-    # Check existence in service_name.json
-    for high_command in high_command_set:
-        if high_command not in service_name_map:
-            raise Exception('No entry of {} in service_name.json. Please add one to the file.'.format(high_command))
-
-
-if __name__ == "__main__":
-    check()
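Stripped of the help-file plumbing, the check is set membership: every top-level command group contributed by an extension must have an entry in service_name.json. A condensed sketch of that core, with the data inlined instead of loaded:

# service_name_map: parsed from service_name.json; high_command_set: first token of each command
service_name_map = {'vm': 'Azure Virtual Machines', 'disk': 'Azure Disk Storage'}
high_command_set = {'vm', 'disk'}

missing = high_command_set - set(service_name_map)
if missing:
    raise Exception('No entry of {} in service_name.json. Please add one to the file.'.format(sorted(missing)))
print('service_name.json covers all top-level command groups')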
diff --git a/src/scripts/ci/sync_extensions.py b/src/scripts/ci/sync_extensions.py
deleted file mode 100644
index beec0213455..00000000000
--- a/src/scripts/ci/sync_extensions.py
+++ /dev/null
@@ -1,208 +0,0 @@
-# --------------------------------------------------------------------------------------------
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License. See License.txt in the project root for license information.
-# --------------------------------------------------------------------------------------------
-# pylint: disable=line-too-long
-# pylint: disable=broad-except
-
-import os
-import re
-import json
-import subprocess
-
-DEFAULT_TARGET_INDEX_URL = os.getenv('AZURE_EXTENSION_TARGET_INDEX_URL')
-STORAGE_ACCOUNT = os.getenv('AZURE_EXTENSION_TARGET_STORAGE_ACCOUNT')
-STORAGE_CONTAINER = os.getenv('AZURE_EXTENSION_TARGET_STORAGE_CONTAINER')
-COMMIT_NUM = os.getenv('AZURE_EXTENSION_COMMIT_NUM') or 1
-BLOB_PREFIX = os.getenv('AZURE_EXTENSION_BLOB_PREFIX')
-
-
-def _get_updated_extension_filenames():
-    cmd = 'git --no-pager diff --diff-filter=ACMRT HEAD~{} -- src/index.json'.format(COMMIT_NUM)
-    updated_content = subprocess.check_output(cmd.split()).decode('utf-8')
-    FILENAME_REGEX = r'"filename":\s+"(.*?)"'
-    added_ext_filenames = {re.findall(FILENAME_REGEX, line)[0] for line in updated_content.splitlines() if line.startswith('+') and not line.startswith('+++') and 'filename' in line}
-    deleted_ext_filenames = {re.findall(FILENAME_REGEX, line)[0] for line in updated_content.splitlines() if line.startswith('-') and not line.startswith('---') and 'filename' in line}
-    return added_ext_filenames, deleted_ext_filenames
-
-
-def download_file(url, file_path):
-    import requests
-    count = 3
-    the_ex = None
-    while count > 0:
-        try:
-            response = requests.get(url, stream=True, allow_redirects=True)
-            assert response.status_code == 200, "Response code {}".format(response.status_code)
-            break
-        except Exception as ex:
-            the_ex = ex
-            count -= 1
-    if count == 0:
-        msg = "Request for {} failed: {}".format(url, str(the_ex))
-        print(msg)
-        raise Exception(msg)
-
-    with open(file_path, 'wb') as f:
-        for chunk in response.iter_content(chunk_size=1024):
-            if chunk:  # ignore keep-alive new chunks
-                f.write(chunk)
-
-
-def _sync_wheel(ext, updated_indexes, failed_urls, overwrite, temp_dir):
-    download_url = ext['downloadUrl']
-    whl_file = download_url.split('/')[-1]
-    whl_path = os.path.join(temp_dir, whl_file)
-    blob_name = f'{BLOB_PREFIX}/{whl_file}' if BLOB_PREFIX else whl_file
-    try:
-        download_file(download_url, whl_path)
-    except Exception:
-        failed_urls.append(download_url)
-        return
-    if not overwrite:
-        cmd = ['az', 'storage', 'blob', 'exists', '--container-name', f'{STORAGE_CONTAINER}', '--account-name',
-               f'{STORAGE_ACCOUNT}', '--name', f'{blob_name}', '--auth-mode', 'login']
-        result = subprocess.run(cmd, capture_output=True)
-        if result.stdout and json.loads(result.stdout)['exists']:
-            print("Skipping '{}' as it already exists...".format(whl_file))
-            return
-
-    cmd = ['az', 'storage', 'blob', 'upload', '--container-name', f'{STORAGE_CONTAINER}', '--account-name',
-           f'{STORAGE_ACCOUNT}', '--name', f'{blob_name}', '--file', f'{os.path.abspath(whl_path)}',
-           '--auth-mode', 'login', '--overwrite']
-    result = subprocess.run(cmd, capture_output=True)
-    if result.returncode != 0:
-        print(f"Failed to upload '{whl_file}' to the storage account")
-        raise Exception(f"Upload of '{whl_file}' failed")
-    cmd = ['az', 'storage', 'blob', 'url', '--container-name', f'{STORAGE_CONTAINER}', '--account-name',
-           f'{STORAGE_ACCOUNT}', '--name', f'{blob_name}', '--auth-mode', 'login']
-    result = subprocess.run(cmd, capture_output=True)
-    print(result)
-    if result.stdout and result.returncode == 0:
-        url = json.loads(result.stdout)
-    else:
-        print("Failed to get the URL for '{}'".format(whl_file))
-        raise Exception(f"Could not resolve the blob URL for '{whl_file}'")
-    updated_index = ext
-    updated_index['downloadUrl'] = url
-    updated_indexes.append(updated_index)
-
-
-def _update_target_extension_index(updated_indexes, deleted_ext_filenames, target_index_path):
-    NAME_REGEX = r'^(.*?)-\d+.\d+.\d+'
-    with open(target_index_path, 'r') as infile:
-        curr_index = json.loads(infile.read())
-    for entry in updated_indexes:
-        filename = entry['filename']
-        extension_name = re.findall(NAME_REGEX, filename)[0].replace('_', '-')
-        if extension_name not in curr_index['extensions'].keys():
-            print("Adding '{}' to index...".format(filename))
-            curr_index['extensions'][extension_name] = [entry]
-        else:
-            print("Updating '{}' in index...".format(filename))
-            curr_entry = next((ext for ext in curr_index['extensions'][extension_name] if ext['filename'] == entry['filename']), None)
-            if curr_entry is not None:  # in case of overwrite; update in place so the change lands in curr_index
-                curr_entry.update(entry)
-            else:
-                curr_index['extensions'][extension_name].append(entry)
-    for filename in deleted_ext_filenames:
-        extension_name = re.findall(NAME_REGEX, filename)[0].replace('_', '-')
-        print("Deleting '{}' in index...".format(filename))
-        curr_index['extensions'][extension_name] = [ext for ext in curr_index['extensions'][extension_name] if ext['filename'] != filename]
-        if not curr_index['extensions'][extension_name]:
-            del curr_index['extensions'][extension_name]
-
-    with open(os.path.join(target_index_path), 'w') as outfile:
-        outfile.write(json.dumps(curr_index, indent=4, sort_keys=True))
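Both _update_target_extension_index and main below rely on the same convention: the extension name is everything in the wheel filename before the version, with underscores mapped back to hyphens. A small sketch of that extraction:

import re

NAME_REGEX = r'^(.*?)-\d+.\d+.\d+'

def extension_name_of(wheel_filename):
    # 'aks_preview-0.5.100-py2.py3-none-any.whl' -> 'aks-preview'
    return re.findall(NAME_REGEX, wheel_filename)[0].replace('_', '-')

assert extension_name_of('aks_preview-0.5.100-py2.py3-none-any.whl') == 'aks-preview'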
-
-
-def main():
-    import shutil
-    import tempfile
-
-    net_added_ext_filenames = []
-    net_deleted_ext_filenames = []
-    sync_all = (os.getenv('AZURE_SYNC_ALL_EXTENSIONS') and os.getenv('AZURE_SYNC_ALL_EXTENSIONS').lower() == 'true')
-    if not sync_all:
-        added_ext_filenames, deleted_ext_filenames = _get_updated_extension_filenames()
-        # when there is a large amount of changes, for instance deleting a lot of old versions of extensions,
-        # git may not accurately recognize the right changes, so we need to compare added filenames and deleted filenames
-        # to get the real changed ones.
-        net_added_ext_filenames = added_ext_filenames - deleted_ext_filenames
-        net_deleted_ext_filenames = deleted_ext_filenames - added_ext_filenames
-        if not net_added_ext_filenames and not net_deleted_ext_filenames:
-            print('index.json not changed. End task.')
-            return
-    temp_dir = tempfile.mkdtemp()
-    with open('src/index.json', 'r') as fd:
-        current_extensions = json.loads(fd.read()).get("extensions")
-
-    target_index = DEFAULT_TARGET_INDEX_URL
-    os.mkdir(os.path.join(temp_dir, 'target'))
-    target_index_path = os.path.join(temp_dir, 'target', 'index.json')
-    try:
-        download_file(target_index, target_index_path)
-    except Exception as ex:
-        if sync_all and '404' in str(ex):
-            initial_index = {"extensions": {}, "formatVersion": "1"}
-            open(target_index_path, 'w').write(json.dumps(initial_index, indent=4, sort_keys=True))
-        else:
-            raise
-    updated_indexes = []
-    failed_urls = []
-    if sync_all:
-        print('Syncing all extensions...\n')
-        # backup the old index.json
-        backup_index_name = f'{BLOB_PREFIX}/index.json.sav' if BLOB_PREFIX else 'index.json.sav'
-        cmd = ['az', 'storage', 'blob', 'upload', '--container-name', f'{STORAGE_CONTAINER}', '--account-name',
-               f'{STORAGE_ACCOUNT}', '--name', f'{backup_index_name}',
-               '--file', f'{os.path.abspath(target_index_path)}', '--auth-mode', 'login', '--overwrite']
-        result = subprocess.run(cmd, capture_output=True)
-        if result.returncode != 0:
-            print(f"Failed to upload '{target_index_path}' to the storage account")
-            raise Exception("backup upload of index.json failed")
-        # start with an empty index.json to sync all extensions
-        initial_index = {"extensions": {}, "formatVersion": "1"}
-        open(target_index_path, 'w').write(json.dumps(initial_index, indent=4, sort_keys=True))
-        for extension_name in current_extensions.keys():
-            for ext in current_extensions[extension_name]:
-                print('Uploading {}'.format(ext['filename']))
-                _sync_wheel(ext, updated_indexes, failed_urls, True, temp_dir)
-    else:
-        NAME_REGEX = r'^(.*?)-\d+.\d+.\d+'
-        for filename in net_added_ext_filenames:
-            extension_name = re.findall(NAME_REGEX, filename)[0].replace('_', '-')
-            print('Uploading {}'.format(filename))
-            ext = current_extensions[extension_name][-1]
-            if ext['filename'] != filename:
-                ext = next((ext for ext in current_extensions[extension_name] if ext['filename'] == filename), None)
-            if ext is not None:
-                _sync_wheel(ext, updated_indexes, failed_urls, True, temp_dir)
-
-    print("")
-    _update_target_extension_index(updated_indexes, net_deleted_ext_filenames, target_index_path)
-    index_name = f'{BLOB_PREFIX}/index.json' if BLOB_PREFIX else 'index.json'
-    cmd = ['az', 'storage', 'blob', 'upload', '--container-name', f'{STORAGE_CONTAINER}', '--account-name',
-           f'{STORAGE_ACCOUNT}', '--name', f'{index_name}', '--file', f'{os.path.abspath(target_index_path)}',
-           '--auth-mode', 'login', '--overwrite']
-    result = subprocess.run(cmd, capture_output=True)
-    if result.returncode != 0:
-        print(f"Failed to upload '{target_index_path}' to the storage account")
-        raise Exception("upload of the updated index.json failed")
-    print("\nSync finished.")
-    if updated_indexes:
-        print("New extensions available in:")
-        for updated_index in updated_indexes:
-            print(updated_index['downloadUrl'])
-    shutil.rmtree(temp_dir)
-
-    if failed_urls:
-        print("\nFailed to download and sync the following files. They are skipped:")
-        for url in failed_urls:
-            print(url)
-        print("")
-        raise Exception("Failed to sync some packages.")
-
-
-if __name__ == '__main__':
-    main()
diff --git a/src/scripts/ci/sync_extensions.sh b/src/scripts/ci/sync_extensions.sh
deleted file mode 100644
index f47d2ace5da..00000000000
--- a/src/scripts/ci/sync_extensions.sh
+++ /dev/null
@@ -1,8 +0,0 @@
-#!/usr/bin/env bash
-
-set -ev
-pip install requests
-
-echo $(pwd)
-
-python scripts/ci/sync_extensions.py
diff --git a/src/scripts/ci/test_index.py b/src/scripts/ci/test_index.py
deleted file mode 100644
index 093ebaa7388..00000000000
--- a/src/scripts/ci/test_index.py
+++ /dev/null
@@ -1,217 +0,0 @@
-#!/usr/bin/env python
-
-# --------------------------------------------------------------------------------------------
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License. See License.txt in the project root for license information.
-# --------------------------------------------------------------------------------------------
-
-""" Test the index and the wheels from both the index and from source extensions in repository """
-
-from __future__ import print_function
-
-import glob
-import hashlib
-import json
-import logging
-import os
-import shutil
-import tempfile
-import unittest
-
-from packaging import version
-from util import SRC_PATH
-from wheel.install import WHEEL_INFO_RE
-
-from util import get_ext_metadata, get_whl_from_url, get_index_data
-
-
-logger = logging.getLogger(__name__)
-logger.setLevel(logging.DEBUG)
-ch = logging.StreamHandler()
-ch.setLevel(logging.DEBUG)
-logger.addHandler(ch)
-
-
-def get_sha256sum(a_file):
-    sha256 = hashlib.sha256()
-    with open(a_file, 'rb') as f:
-        sha256.update(f.read())
-    return sha256.hexdigest()
-
-
-def check_min_version(extension_name, metadata):
-    if 'azext.minCliCoreVersion' not in metadata:
-        try:
-            azext_metadata = glob.glob(os.path.join(SRC_PATH, extension_name, 'azext_*', 'azext_metadata.json'))[0]
-            with open(azext_metadata, 'r') as f:
-                metadata = json.load(f)
-                if not metadata.get('azext.minCliCoreVersion'):
-                    raise AssertionError(f'{extension_name} can not get azext.minCliCoreVersion')
-        except Exception as e:
-            logger.error(f'{extension_name} can not get azext.minCliCoreVersion: {e}')
-            raise e
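get_sha256sum above reads the whole wheel into memory before hashing; for large wheels a chunked read yields the same digest with a flat memory profile. A drop-in alternative sketch:

import hashlib

def get_sha256sum_chunked(path, chunk_size=1 << 20):
    # identical digest to hashing the whole file at once
    sha256 = hashlib.sha256()
    with open(path, 'rb') as f:
        for chunk in iter(lambda: f.read(chunk_size), b''):
            sha256.update(chunk)
    return sha256.hexdigest()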
- # 0.29.0, 0.30.0 are the two previous versions before that release. - parsed_filename = WHEEL_INFO_RE(item['filename']) - p = parsed_filename.groupdict() - self.assertTrue(p.get('name'), "Can't get name for {}".format(item['filename'])) - built_wheel = p.get('abi') == 'none' and p.get('plat') == 'any' - self.assertTrue(built_wheel, - "{} of {} not platform independent wheel. " - "It should end in -none-any.whl".format(item['filename'], ext_name)) - - def test_extension_url_filename(self): - for exts in self.index['extensions'].values(): - for item in exts: - self.assertEqual(os.path.basename(item['downloadUrl']), item['filename'], - "Filename must match last segment of downloadUrl") - - def test_extension_url_pypi(self): - for exts in self.index['extensions'].values(): - for item in exts: - url = item['downloadUrl'] - pypi_url_prefix = 'https://pypi.python.org/packages/' - pythonhosted_url_prefix = 'https://files.pythonhosted.org/packages/' - if url.startswith(pypi_url_prefix): - new_url = url.replace(pypi_url_prefix, pythonhosted_url_prefix) - hash_pos = new_url.find('#') - new_url = new_url if hash_pos == -1 else new_url[:hash_pos] - self.fail("Replace {} with {}\n" - "See for more info https://wiki.archlinux.org/index.php/Python_package_guidelines" - "#PyPI_download_URLs".format(url, new_url)) - - def test_filename_duplicates(self): - filenames = [] - for exts in self.index['extensions'].values(): - for item in exts: - filenames.append(item['filename']) - filename_seen = set() - dups = [] - for f in filenames: - if f in filename_seen: - dups.append(f) - filename_seen.add(f) - self.assertFalse(dups, "Duplicate filenames found {}".format(dups)) - - @unittest.skipUnless(os.getenv('CI'), 'Skipped as not running on CI') - def test_checksums(self): - for exts in self.index['extensions'].values(): - # only test the latest version - item = max(exts, key=lambda ext: version.parse(ext['metadata']['version'])) - ext_file = get_whl_from_url(item['downloadUrl'], item['filename'], - self.whl_cache_dir, self.whl_cache) - print(ext_file) - computed_hash = get_sha256sum(ext_file) - self.assertEqual(computed_hash, item['sha256Digest'], - "Computed {} but found {} in index for {}".format(computed_hash, - item['sha256Digest'], - item['filename'])) - - @unittest.skipUnless(os.getenv('CI'), 'Skipped as not running on CI') - def test_metadata(self): - skipable_extension_thresholds = { - 'ip-group': '0.1.2', - 'vm-repair': '0.3.1', - 'mixed-reality': '0.0.2', - 'subscription': '0.1.4', - 'managementpartner': '0.1.3', - 'log-analytics': '0.2.1' - } - - historical_extensions = { - 'keyvault-preview': '0.1.3', - 'log-analytics': '0.2.1' - } - - extensions_dir = tempfile.mkdtemp() - for ext_name, exts in self.index['extensions'].items(): - # only test the latest version - item = max(exts, key=lambda ext: version.parse(ext['metadata']['version'])) - ext_dir = tempfile.mkdtemp(dir=extensions_dir) - ext_file = get_whl_from_url(item['downloadUrl'], item['filename'], - self.whl_cache_dir, self.whl_cache) - - print(ext_file) - - ext_version = item['metadata']['version'] - try: - metadata = get_ext_metadata(ext_dir, ext_file, ext_name) # check file exists - except ValueError as ex: - if ext_name in skipable_extension_thresholds: - threshold_version = skipable_extension_thresholds[ext_name] - - if version.parse(ext_version) <= version.parse(threshold_version): - continue - else: - raise ex - else: - raise ex - - try: - # check key properties exists - check_min_version(ext_name, metadata) - except AssertionError as ex: - 
if ext_name in historical_extensions: - threshold_version = historical_extensions[ext_name] - - if version.parse(ext_version) <= version.parse(threshold_version): - continue - else: - raise ex - else: - raise ex - - # Due to https://github.com/pypa/wheel/issues/195 we prevent whls built with 0.31.0 or greater. - # 0.29.0, 0.30.0 are the two previous versions before that release. - supported_generators = ['bdist_wheel (0.29.0)', 'bdist_wheel (0.30.0)'] - self.assertIn(metadata.get('generator'), supported_generators, - "{}: 'generator' should be one of {}. " - "Build the extension with a different version of the 'wheel' package " - "(e.g. `pip install wheel==0.30.0`). " - "This is due to https://github.com/pypa/wheel/issues/195".format(ext_name, - supported_generators)) - self.assertDictEqual(metadata, item['metadata'], - "Metadata for {} in index doesn't match the expected of: \n" - "{}".format(item['filename'], json.dumps(metadata, indent=2, sort_keys=True, - separators=(',', ': ')))) - - shutil.rmtree(extensions_dir) - - -if __name__ == '__main__': - unittest.main() diff --git a/src/scripts/ci/test_index_ref_doc.sh b/src/scripts/ci/test_index_ref_doc.sh deleted file mode 100644 index 5aa4815eb05..00000000000 --- a/src/scripts/ci/test_index_ref_doc.sh +++ /dev/null @@ -1,13 +0,0 @@ -#!/usr/bin/env bash -set -ex - -# Install CLI -echo "Installing azure-cli..." - -pip install --pre azure-cli --extra-index-url https://azurecliprod.blob.core.windows.net/edge -q -pip install sphinx==1.7.0 Jinja2==3.0.3 -echo "Installed." - -python ./scripts/ci/index_ref_doc.py -v - -echo "OK." diff --git a/src/scripts/ci/test_init.py b/src/scripts/ci/test_init.py deleted file mode 100644 index 74cb45c9a26..00000000000 --- a/src/scripts/ci/test_init.py +++ /dev/null @@ -1,65 +0,0 @@ -#!/usr/bin/env python - -# -------------------------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. 
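Note on the checksum test above: it reduces to downloading the newest wheel per extension and comparing its sha256 against the index entry. A standalone sketch follows; the index path and entry shape are taken from the tests themselves, and nothing in it is part of the deleted scripts:

    # Minimal re-implementation of the test_checksums logic above.
    import hashlib
    import json
    import urllib.request

    from packaging import version

    with open('src/index.json') as f:
        index = json.load(f)
    for exts in index['extensions'].values():
        latest = max(exts, key=lambda e: version.parse(e['metadata']['version']))
        path, _ = urllib.request.urlretrieve(latest['downloadUrl'])
        digest = hashlib.sha256(open(path, 'rb').read()).hexdigest()
        assert digest == latest['sha256Digest'], 'checksum mismatch: ' + latest['filename']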
-# --------------------------------------------------------------------------------------------
-
-from util import SRC_PATH
-import logging
-import os
-import sys
-
-logger = logging.getLogger(__name__)
-logger.setLevel(logging.DEBUG)
-ch = logging.StreamHandler()
-ch.setLevel(logging.DEBUG)
-logger.addHandler(ch)
-
-
-check_path = 'vendored_sdks'
-
-
-def check_init_files():
-    """ Check that the vendored_sdks directory contains __init__.py in all extensions """
-    ref = []
-    # SRC_PATH: azure-cli-extensions\src
-    for src_d in os.listdir(SRC_PATH):
-        # src_d: azure-cli-extensions\src\ext_name
-        src_d_full = os.path.join(SRC_PATH, src_d)
-        if os.path.isdir(src_d_full):
-            for d in os.listdir(src_d_full):
-                if d.startswith('azext_'):
-                    # root_dir: azure-cli-extensions\src\ext_name\azext_ext_name
-                    ref.append(check_init_recursive(os.path.join(src_d_full, d)))
-    return ref
-
-
-def check_init_recursive(root_dir):
-    """ Check whether an extension contains __init__.py in every non-empty vendored_sdks directory.
-    os.walk yields (dirpath, dirnames, filenames) for each directory under root_dir.
-    :param root_dir: azure-cli-extensions\src\{ext_name}\azext_{ext_name}
-    """
-    error_flag = False
-    for (dirpath, dirnames, filenames) in os.walk(root_dir):
-        if dirpath.endswith(check_path):
-            # Flag a vendored_sdks directory that contains at least one file but no __init__.py
-            if '__init__.py' not in filenames and not is_empty_dir(dirpath):
-                logger.error(f'Directory {dirpath} does not contain __init__.py, please add it.')
-                error_flag = True
-    return error_flag
-
-
-def is_empty_dir(root_dir):
-    """ Return True if the directory tree contains no files """
-    for (dirpath, dirnames, filenames) in os.walk(root_dir):
-        if filenames:
-            return False
-    return True
-
-
-if __name__ == '__main__':
-    ref = check_init_files()
-    sys.exit(1 if any(ref) else 0)
diff --git a/src/scripts/ci/test_source.py b/src/scripts/ci/test_source.py
deleted file mode 100644
index 94ddf5bafd7..00000000000
--- a/src/scripts/ci/test_source.py
+++ /dev/null
@@ -1,107 +0,0 @@
-#!/usr/bin/env python
-
-# --------------------------------------------------------------------------------------------
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License. See License.txt in the project root for license information.
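The __init__.py requirement that test_init.py (above) enforces is presumably there because setuptools' find_packages() only collects regular packages, so a vendored_sdks subtree without __init__.py can be silently dropped from the built wheel; that rationale is inferred, not stated in the script. A usage sketch with a hypothetical extension path:

    # Hypothetical invocation of the checker above ('src/demo/azext_demo' is illustrative).
    from test_init import check_init_recursive

    if check_init_recursive('src/demo/azext_demo'):
        raise SystemExit('missing __init__.py under a vendored_sdks directory')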
-# -------------------------------------------------------------------------------------------- - -from __future__ import print_function - -import logging -import os -import sys -import tempfile -import shutil -import shlex - -from subprocess import check_output, CalledProcessError, run -from util import SRC_PATH - -logger = logging.getLogger(__name__) - -ALL_TESTS = [] - -for src_d in os.listdir(SRC_PATH): - src_d_full = os.path.join(SRC_PATH, src_d) - if not os.path.isdir(src_d_full): - continue - pkg_name = next((d for d in os.listdir(src_d_full) if d.startswith('azext_')), None) - - # If running in Travis CI, only run tests for edited extensions - commit_range = os.environ.get('TRAVIS_COMMIT_RANGE') - if commit_range and not check_output(['git', '--no-pager', 'diff', '--name-only', commit_range, '--', src_d_full]): - continue - - # Running in Azure DevOps - cmd_tpl = 'git --no-pager diff --name-only origin/{commit_start} {commit_end} -- {code_dir}' - ado_branch_last_commit = os.environ.get('ADO_PULL_REQUEST_LATEST_COMMIT') - ado_target_branch = os.environ.get('ADO_PULL_REQUEST_TARGET_BRANCH') - if ado_branch_last_commit and ado_target_branch: - if ado_branch_last_commit == '$(System.PullRequest.SourceCommitId)': - # default value if ADO_PULL_REQUEST_LATEST_COMMIT not set in ADO - continue - elif ado_target_branch == '$(System.PullRequest.TargetBranch)': - # default value if ADO_PULL_REQUEST_TARGET_BRANCH not set in ADO - continue - else: - cmd = cmd_tpl.format(commit_start=ado_target_branch, commit_end=ado_branch_last_commit, code_dir=src_d_full) - if not check_output(shlex.split(cmd)): - continue - - # Find the package and check it has tests - if pkg_name and os.path.isdir(os.path.join(src_d_full, pkg_name, 'tests')): - ALL_TESTS.append((pkg_name, src_d_full)) - -logger.warning(f'ado_branch_last_commit: {ado_branch_last_commit}, ' - f'ado_target_branch: {ado_target_branch}, ' - f'ALL_TESTS: {ALL_TESTS}.') - - -def run_command(cmd, check_return_code=False, cwd=None): - logger.info(f'cmd: {cmd}') - out = run(cmd, check=True, cwd=cwd) - if check_return_code and out.returncode: - raise RuntimeError(f"{cmd} failed") - - -def test_extension(): - for pkg_name, ext_path in ALL_TESTS: - ext_name = ext_path.split('/')[-1] - logger.info(f'installing extension: {ext_name}') - cmd = ['azdev', 'extension', 'add', ext_name] - run_command(cmd, check_return_code=True) - - # Use azext_$ext_name, a unique long name for testing, to avoid the following error when the main module and extension name have the same name: - # 'containerapp' exists in both 'azext_containerapp' and 'containerapp'. Resolve using `azext_containerapp.containerapp` or `containerapp.containerapp` - # 'containerapp' not found. If newly added, re-run with --discover - # No tests selected to run. 
- # ---------------------------------------------------------------------- - # For the recommended azdev test example, please refer to: `azdev test --help` - # `python -m azdev test --no-exitfirst --discover --verbose azext_containerapp` - test_args = [sys.executable, '-m', 'azdev', 'test', '--no-exitfirst', '--discover', '--verbose', pkg_name] - logger.warning(f'test_args: {test_args}') - - run_command(test_args, check_return_code=True) - logger.info(f'uninstalling extension: {ext_name}') - cmd = ['azdev', 'extension', 'remove', ext_name] - run_command(cmd, check_return_code=True) - - -def test_source_wheels(): - # Test we can build all sources into wheels and that metadata from the wheel is valid - built_whl_dir = tempfile.mkdtemp() - source_extensions = [os.path.join(SRC_PATH, n) for n in os.listdir(SRC_PATH) - if os.path.isdir(os.path.join(SRC_PATH, n))] - for s in source_extensions: - if not os.path.isfile(os.path.join(s, 'setup.py')): - continue - try: - check_output(['python', 'setup.py', 'bdist_wheel', '-q', '-d', built_whl_dir], cwd=s) - except CalledProcessError as err: - raise("Unable to build extension {} : {}".format(s, err)) - shutil.rmtree(built_whl_dir) - - -if __name__ == '__main__': - test_extension() - test_source_wheels() diff --git a/src/scripts/ci/update_ext_cmd_tree.py b/src/scripts/ci/update_ext_cmd_tree.py deleted file mode 100644 index 6480d073a62..00000000000 --- a/src/scripts/ci/update_ext_cmd_tree.py +++ /dev/null @@ -1,112 +0,0 @@ -# -------------------------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# -------------------------------------------------------------------------------------------- - -import filecmp -import json -import os -import subprocess -import sys -from azure.cli.core import get_default_cli -from azure.cli.core._session import Session -from azure.cli.core.commands import _load_extension_command_loader -from azure.cli.core.extension import get_extension_modname, get_extension_path -from sync_extensions import download_file - -STORAGE_ACCOUNT = os.getenv('AZURE_EXTENSION_CMD_TREE_STORAGE_ACCOUNT') -STORAGE_CONTAINER = os.getenv('AZURE_EXTENSION_CMD_TREE_STORAGE_CONTAINER') -BLOB_PREFIX = os.getenv('AZURE_EXTENSION_CMD_TREE_BLOB_PREFIX') - -az_cli = get_default_cli() -file_name = 'extCmdTreeToUpload.json' - - -def merge(data, key, value): - if isinstance(value, str): - if key in data: - raise Exception(f"Key: {key} already exists in {data[key]}. 
2 extensions cannot have the same command!") - data[key] = value - else: - data.setdefault(key, {}) - for k, v in value.items(): - merge(data[key], k, v) - - -def update_cmd_tree(ext_name): - print(f"Processing {ext_name}") - - ext_dir = get_extension_path(ext_name) - ext_mod = get_extension_modname(ext_name, ext_dir=ext_dir) - - invoker = az_cli.invocation_cls(cli_ctx=az_cli, commands_loader_cls=az_cli.commands_loader_cls, - parser_cls=az_cli.parser_cls, help_cls=az_cli.help_cls) - az_cli.invocation = invoker - - sys.path.append(ext_dir) - extension_command_table, _ = _load_extension_command_loader(invoker.commands_loader, None, ext_mod) - - EXT_CMD_TREE_TO_UPLOAD = Session(encoding='utf-8') - EXT_CMD_TREE_TO_UPLOAD.load(os.path.expanduser(os.path.join('~', '.azure', file_name))) - root = {} - for cmd_name, ext_cmd in extension_command_table.items(): - try: - # do not include hidden deprecated command - if ext_cmd.deprecate_info.hide: - print(f"Skip hidden deprecated command: {cmd_name}") - continue - except AttributeError: - pass - parts = cmd_name.split() - parent = root - for i, part in enumerate(parts): - if part in parent: - pass - elif i == len(parts) - 1: - parent[part] = ext_name - else: - parent[part] = {} - parent = parent[part] - print(root) - for k, v in root.items(): - merge(EXT_CMD_TREE_TO_UPLOAD.data, k, v) - EXT_CMD_TREE_TO_UPLOAD.save_with_retry() - - -def upload_cmd_tree(): - blob_file_name = 'extensionCommandTree.json' - if BLOB_PREFIX: - blob_file_name = f'{BLOB_PREFIX}/{blob_file_name}' - downloaded_file_name = 'extCmdTreeDownloaded.json' - file_path = os.path.expanduser(os.path.join('~', '.azure', file_name)) - - cmd = ['az', 'storage', 'blob', 'upload', '--container-name', f'{STORAGE_CONTAINER}', '--account-name', - f'{STORAGE_ACCOUNT}', '--name', f'{blob_file_name}', '--file', f'{file_path}', '--auth-mode', 'login', - '--overwrite'] - result = subprocess.run(cmd, capture_output=True) - if result.returncode != 0: - print(f"Failed to upload '{blob_file_name}' to the storage account") - print(result) - - cmd = ['az', 'storage', 'blob', 'url', '--container-name', f'{STORAGE_CONTAINER}', '--account-name', - f'{STORAGE_ACCOUNT}', '--name', f'{blob_file_name}', '--auth-mode', 'login'] - result = subprocess.run(cmd, capture_output=True) - if result.stdout and result.returncode == 0: - url = json.loads(result.stdout) - else: - print(f"Failed to get the URL for '{blob_file_name}'") - raise - - download_file_path = os.path.expanduser(os.path.join('~', '.azure', downloaded_file_name)) - download_file(url, download_file_path) - if filecmp.cmp(file_path, download_file_path): - print("extensionCommandTree.json uploaded successfully. URL: {}".format(url)) - else: - raise Exception("Failed to update extensionCommandTree.json in the storage account") - - -if __name__ == '__main__': - for ext in sys.argv[1:]: - update_cmd_tree(ext) - print() - upload_cmd_tree() diff --git a/src/scripts/ci/update_index.py b/src/scripts/ci/update_index.py deleted file mode 100644 index 28f10f99177..00000000000 --- a/src/scripts/ci/update_index.py +++ /dev/null @@ -1,68 +0,0 @@ -# -------------------------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. 
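merge() above treats the command tree as nested dicts: interior keys are command groups and leaf values name the owning extension, so a leaf collision means two extensions claim the same command. A behavior sketch with made-up data:

    # Hypothetical command tree; leaves map a command to its extension name.
    tree = {'monitor': {'app-insights': {'component': 'application-insights'}}}
    merge(tree, 'monitor', {'app-insights': {'web-test': 'application-insights'}})
    # 'web-test' is added alongside 'component' under the same group
    merge(tree, 'monitor', {'app-insights': {'component': 'other-ext'}})  # raises Exception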
-# --------------------------------------------------------------------------------------------
-
-import hashlib
-import json
-import re
-import sys
-import tempfile
-
-from util import get_ext_metadata, get_whl_from_url
-
-NAME_REGEX = r'.*/([^/]*)-\d+\.\d+\.\d+'
-
-
-def get_sha256sum(a_file):
-    sha256 = hashlib.sha256()
-    with open(a_file, 'rb') as f:
-        sha256.update(f.read())
-    return sha256.hexdigest()
-
-
-def main():
-
-    # Get extension WHL from URL
-    whl_path = None
-    try:
-        whl_path = sys.argv[1]
-    except IndexError:
-        pass
-    if not whl_path or not whl_path.endswith('.whl') or not whl_path.startswith('https:'):
-        raise ValueError('incorrect usage: update_script <URL to .whl file>')
-
-    # Extract the extension name
-    try:
-        extension_name = re.findall(NAME_REGEX, whl_path)[0]
-        extension_name = extension_name.replace('_', '-')
-    except IndexError:
-        raise ValueError('unable to parse extension name')
-
-    extensions_dir = tempfile.mkdtemp()
-    ext_dir = tempfile.mkdtemp(dir=extensions_dir)
-    whl_cache_dir = tempfile.mkdtemp()
-    whl_cache = {}
-    ext_file = get_whl_from_url(whl_path, extension_name, whl_cache_dir, whl_cache)
-
-    with open('./src/index.json', 'r') as infile:
-        curr_index = json.loads(infile.read())
-
-    try:
-        entry = curr_index['extensions'][extension_name]
-    except KeyError:
-        # a missing dict key raises KeyError, not IndexError
-        raise ValueError('{} not found in index.json'.format(extension_name))
-
-    entry[0]['downloadUrl'] = whl_path
-    entry[0]['sha256Digest'] = get_sha256sum(ext_file)
-    entry[0]['filename'] = whl_path.split('/')[-1]
-    entry[0]['metadata'] = get_ext_metadata(ext_dir, ext_file, extension_name)
-
-    # update index and write back to file
-    curr_index['extensions'][extension_name] = entry
-    with open('./src/index.json', 'w') as outfile:
-        outfile.write(json.dumps(curr_index, indent=4, sort_keys=True))
-
-
-if __name__ == '__main__':
-    main()
diff --git a/src/scripts/ci/util.py b/src/scripts/ci/util.py
deleted file mode 100644
index ffc7d54797b..00000000000
--- a/src/scripts/ci/util.py
+++ /dev/null
@@ -1,165 +0,0 @@
-# --------------------------------------------------------------------------------------------
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License. See License.txt in the project root for license information.
-# --------------------------------------------------------------------------------------------
-
-import logging
-import os
-import re
-import shlex
-import json
-import zipfile
-
-from subprocess import check_output
-
-logger = logging.getLogger(__name__)
-
-# copy from wheel==0.30.0
-WHEEL_INFO_RE = re.compile(
    r"""^(?P<namever>(?P<name>.+?)(-(?P<ver>\d.+?))?)
-    ((-(?P<build>\d.*?))?-(?P<pyver>.+?)-(?P<abi>.+?)-(?P<plat>.+?)
-    \.whl|\.dist-info)$""",
-    re.VERBOSE).match
-
-
-def get_repo_root():
-    current_dir = os.path.dirname(os.path.abspath(__file__))
-    while not os.path.exists(os.path.join(current_dir, 'CONTRIBUTING.rst')):
-        current_dir = os.path.dirname(current_dir)
-    return current_dir
-
-
-def _get_extension_modname(ext_dir):
-    # Modification of https://github.com/Azure/azure-cli/blob/dev/src/azure-cli-core/azure/cli/core/extension.py#L153
-    EXTENSIONS_MOD_PREFIX = 'azext_'
-    pos_mods = [n for n in os.listdir(ext_dir)
-                if n.startswith(EXTENSIONS_MOD_PREFIX) and os.path.isdir(os.path.join(ext_dir, n))]
-    if len(pos_mods) != 1:
-        raise AssertionError("Expected 1 module to load starting with "
-                             "'{}': got {}".format(EXTENSIONS_MOD_PREFIX, pos_mods))
-    return pos_mods[0]
-
-
-def _get_azext_metadata(ext_dir):
-    # Modification of https://github.com/Azure/azure-cli/blob/dev/src/azure-cli-core/azure/cli/core/extension.py#L109
-    AZEXT_METADATA_FILENAME = 'azext_metadata.json'
-    azext_metadata = None
-    ext_modname = _get_extension_modname(ext_dir=ext_dir)
-    azext_metadata_filepath = os.path.join(ext_dir, ext_modname, AZEXT_METADATA_FILENAME)
-    if os.path.isfile(azext_metadata_filepath):
-        with open(azext_metadata_filepath) as f:
-            azext_metadata = json.load(f)
-    return azext_metadata
-
-
-def get_ext_metadata(ext_dir, ext_file, ext_name):
-    # Modification of https://github.com/Azure/azure-cli/blob/dev/src/azure-cli-core/azure/cli/core/extension.py#L89
-    WHL_METADATA_FILENAME = 'metadata.json'
-    zip_ref = zipfile.ZipFile(ext_file, 'r')
-    zip_ref.extractall(ext_dir)
-    zip_ref.close()
-    metadata = {}
-    dist_info_dirs = [f for f in os.listdir(ext_dir) if f.endswith('.dist-info')]
-
-    azext_metadata = _get_azext_metadata(ext_dir)
-
-    if not azext_metadata:
-        raise ValueError('azext_metadata.json for Extension "{}" Metadata is missing'.format(ext_name))
-
-    metadata.update(azext_metadata)
-
-    for dist_info_dirname in dist_info_dirs:
-        parsed_dist_info_dir = WHEEL_INFO_RE(dist_info_dirname)
-        if parsed_dist_info_dir and parsed_dist_info_dir.groupdict().get('name') == ext_name.replace('-', '_'):
-            whl_metadata_filepath = os.path.join(ext_dir, dist_info_dirname, WHL_METADATA_FILENAME)
-            if os.path.isfile(whl_metadata_filepath):
-                with open(whl_metadata_filepath) as f:
-                    metadata.update(json.load(f))
-    return metadata
-
-
-def get_whl_from_url(url, filename, tmp_dir, whl_cache=None):
-    if not whl_cache:
-        whl_cache = {}
-    if url in whl_cache:
-        return whl_cache[url]
-    import requests
-    TRIES = 3
-    for try_number in range(TRIES):
-        try:
-            r = requests.get(url, stream=True)
-            assert r.status_code == 200, "Request to {} failed with {}".format(url, r.status_code)
-            break
-        except (requests.exceptions.ConnectionError, requests.exceptions.HTTPError):
-            import time
-            time.sleep(0.5)
-            continue
-    else:
-        # all retries failed; without this branch `r` would be unbound below
-        raise RuntimeError("Failed to download {} after {} attempts".format(url, TRIES))
-
-    ext_file = os.path.join(tmp_dir, filename)
-    with open(ext_file, 'wb') as f:
-        for chunk in r.iter_content(chunk_size=1024):
-            if chunk:  # ignore keep-alive new chunks
-                f.write(chunk)
-    whl_cache[url] = ext_file
-    return ext_file
-
-
-SRC_PATH = os.path.join(get_repo_root(), 'src')
-INDEX_PATH = os.path.join(SRC_PATH, 'index.json')
-
-
-def _catch_dup_keys(pairs):
-    seen = {}
-    for k, v in pairs:
-        if k in seen:
-            raise ValueError("duplicate key {}".format(k))
-        seen[k] = v
-    return seen
-
-
-def get_index_data():
-    try:
-        with open(INDEX_PATH) as f:
-            return json.load(f, object_pairs_hook=_catch_dup_keys)
-    except ValueError as err:
-        raise AssertionError("Invalid JSON in {}: {}".format(INDEX_PATH, err))
-
-
-def 
diff_code(start, end): - diff_ref = [] - - for src_d in os.listdir(SRC_PATH): - src_d_full = os.path.join(SRC_PATH, src_d) - if not os.path.isdir(src_d_full): - continue - pkg_name = next((d for d in os.listdir(src_d_full) if d.startswith('azext_')), None) - - # If running in Travis CI, only run tests for edited extensions - commit_range = os.environ.get('TRAVIS_COMMIT_RANGE') - if commit_range and not check_output( - ['git', '--no-pager', 'diff', '--name-only', commit_range, '--', src_d_full]): - continue - - # Running in Azure DevOps - cmd_tpl = 'git --no-pager diff --name-only origin/{start} {end} -- {code_dir}' - # ado_branch_last_commit = os.environ.get('ADO_PULL_REQUEST_LATEST_COMMIT') - # ado_target_branch = os.environ.get('ADO_PULL_REQUEST_TARGET_BRANCH') - if start and end: - if end == '$(System.PullRequest.SourceCommitId)': - # default value if ADO_PULL_REQUEST_LATEST_COMMIT not set in ADO - continue - elif start == '$(System.PullRequest.TargetBranch)': - # default value if ADO_PULL_REQUEST_TARGET_BRANCH not set in ADO - continue - else: - cmd = cmd_tpl.format(start=start, end=end, - code_dir=src_d_full) - if not check_output(shlex.split(cmd)): - continue - - diff_ref.append((pkg_name, src_d_full)) - - logger.warning(f'start: {start}, ' - f'end: {end}, ' - f'diff_ref: {diff_ref}.') - return diff_ref diff --git a/src/scripts/ci/verify_codeowners.py b/src/scripts/ci/verify_codeowners.py deleted file mode 100644 index e56b16c6537..00000000000 --- a/src/scripts/ci/verify_codeowners.py +++ /dev/null @@ -1,42 +0,0 @@ -# -------------------------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# -------------------------------------------------------------------------------------------- - -from __future__ import print_function - -import os -import sys - -from util import get_repo_root - -REPO_ROOT = get_repo_root() -CODEOWNERS = os.path.join(REPO_ROOT, '.github', 'CODEOWNERS') -SRC_DIR = os.path.join(REPO_ROOT, 'src') - - -def get_src_dir_codeowners(): - contents = [] - with open(CODEOWNERS) as f: - contents = [x.strip() for x in f.readlines()] - return dict([x.split(' ', 1) for x in contents if x.startswith('/src/') and x.split(' ')[0].endswith('/')]) - - -def main(): - owners = get_src_dir_codeowners() - dangling_entries = [e for e in owners if not os.path.isdir(os.path.join(REPO_ROOT, e[1:]))] - missing_entries = ['/src/{}/'.format(p) for p in os.listdir(SRC_DIR) - if os.path.isdir(os.path.join(SRC_DIR, p)) and '/src/{}/'.format(p) not in owners] - if dangling_entries or missing_entries: - print('Errors whilst verifying {}!'.format(CODEOWNERS)) - if dangling_entries: - print("Remove the following {} as these directories don't exist.".format(dangling_entries), - file=sys.stderr) - if missing_entries: - print("The following directories are missing codeowners {}.".format(missing_entries), - file=sys.stderr) - sys.exit(1) - - -if __name__ == '__main__': - main() diff --git a/src/scripts/refdoc/README.md b/src/scripts/refdoc/README.md deleted file mode 100644 index ab71c0bc0cd..00000000000 --- a/src/scripts/refdoc/README.md +++ /dev/null @@ -1,26 +0,0 @@ -# Ref doc gen # - -Scripts for reference documentation generation for Azure CLI Extensions using [sphinx](http://www.sphinx-doc.org/en/master/) - -# How to generate the Sphinx help file output # - -## Set up environment ## - -1. 
Ensure the CLI is installed in your Python virtual environment. -2. Inside the Python virtual environment, run `pip install sphinx==1.7.0` - -## Run Sphinx ## - -1. Run the generate script `python scripts/refdoc/generate.py -e PATH_TO_WHL.whl` - -## Retrieve output ## - -1. By default, the XML output is stored in `ref-doc-out-*/ind.xml` - -## Generating Sphinx output for the latest versions of all extensions in index ## - -1. Ensure the CLI is installed in your Python virtual environment. -2. Inside the Python virtual environment, run `pip install sphinx==1.7.0` -3. Set the environment variable `AZ_EXT_REF_DOC_OUT_DIR` to an empty directory that exists. -4. Run the following script to generate sphinx output for the latest versions of all extensions in the index - `python ./scripts/ci/index_ref_doc.py -v` -5. The sphinx output will be in the directory pointed to by the `AZ_EXT_REF_DOC_OUT_DIR` environment variable. \ No newline at end of file diff --git a/src/scripts/refdoc/azhelpgen/__init__.py b/src/scripts/refdoc/azhelpgen/__init__.py deleted file mode 100644 index 34913fb394d..00000000000 --- a/src/scripts/refdoc/azhelpgen/__init__.py +++ /dev/null @@ -1,4 +0,0 @@ -# -------------------------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# -------------------------------------------------------------------------------------------- diff --git a/src/scripts/refdoc/azhelpgen/azhelpgen.py b/src/scripts/refdoc/azhelpgen/azhelpgen.py deleted file mode 100644 index 9daadc7d1e5..00000000000 --- a/src/scripts/refdoc/azhelpgen/azhelpgen.py +++ /dev/null @@ -1,190 +0,0 @@ -# -------------------------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# -------------------------------------------------------------------------------------------- - -import argparse -import json -from os.path import expanduser -from docutils import nodes -from docutils.statemachine import ViewList -from docutils.parsers.rst import Directive -from sphinx.util.nodes import nested_parse_with_titles - -from knack.help_files import helps - -from knack.help import GroupHelpFile -from azure.cli.core import MainCommandsLoader, AzCli -from azure.cli.core.commands import AzCliCommandInvoker, ExtensionCommandSource -from azure.cli.core.parser import AzCliCommandParser -from azure.cli.core._help import AzCliHelp, CliCommandHelpFile, ArgumentGroupRegistry - -USER_HOME = expanduser('~') - - -def get_extension_help_files(cli_ctx): - - # 1. Create invoker and load command table and arguments. Remember to turn off applicability check. 
- invoker = cli_ctx.invocation_cls(cli_ctx=cli_ctx, commands_loader_cls=cli_ctx.commands_loader_cls, - parser_cls=cli_ctx.parser_cls, help_cls=cli_ctx.help_cls) - cli_ctx.invocation = invoker - - invoker.commands_loader.skip_applicability = True - cmd_table = invoker.commands_loader.load_command_table(None) - - # turn off applicability check for all loaders - for loaders in invoker.commands_loader.cmd_to_loader_map.values(): - for loader in loaders: - loader.skip_applicability = True - - # filter the command table to only get commands from extensions - cmd_table = {k: v for k, v in cmd_table.items() if isinstance(v.command_source, ExtensionCommandSource)} - invoker.commands_loader.command_table = cmd_table - print('FOUND {} command(s) from the extension.'.format(len(cmd_table))) - - for cmd_name in cmd_table: - invoker.commands_loader.load_arguments(cmd_name) - - invoker.parser.load_command_table(invoker.commands_loader) - - # 2. Now load applicable help files - parser_keys = [] - parser_values = [] - sub_parser_keys = [] - sub_parser_values = [] - _store_parsers(invoker.parser, parser_keys, parser_values, sub_parser_keys, sub_parser_values) - for cmd, parser in zip(parser_keys, parser_values): - if cmd not in sub_parser_keys: - sub_parser_keys.append(cmd) - sub_parser_values.append(parser) - help_ctx = cli_ctx.help_cls(cli_ctx=cli_ctx) - help_files = [] - for cmd, parser in zip(sub_parser_keys, sub_parser_values): - try: - help_file = GroupHelpFile(help_ctx, cmd, parser) if _is_group(parser) \ - else CliCommandHelpFile(help_ctx, cmd, parser) - help_file.load(parser) - help_files.append(help_file) - except Exception as ex: - print("Skipped '{}' due to '{}'".format(cmd, ex)) - help_files = sorted(help_files, key=lambda x: x.command) - return help_files - -class AzHelpGenDirective(Directive): - def make_rst(self): - INDENT = ' ' - DOUBLEINDENT = INDENT * 2 - - az_cli = AzCli(cli_name='az', - commands_loader_cls=MainCommandsLoader, - invocation_cls=AzCliCommandInvoker, - parser_cls=AzCliCommandParser, - help_cls=AzCliHelp) - help_files = get_extension_help_files(az_cli) - - for help_file in help_files: - is_command = isinstance(help_file, CliCommandHelpFile) - yield '.. cli{}:: {}'.format('command' if is_command else 'group', help_file.command if help_file.command else 'az') #it is top level group az if command is empty - yield '' - yield '{}:summary: {}'.format(INDENT, help_file.short_summary) - yield '{}:description: {}'.format(INDENT, help_file.long_summary) - if help_file.deprecate_info: - yield '{}:deprecated: {}'.format(INDENT, help_file.deprecate_info._get_message(help_file.deprecate_info)) - yield '' - - if is_command and help_file.parameters: - group_registry = ArgumentGroupRegistry([p.group_name for p in help_file.parameters if p.group_name]) - - for arg in sorted(help_file.parameters, - key=lambda p: group_registry.get_group_priority(p.group_name) - + str(not p.required) + p.name): - yield '{}.. 
cliarg:: {}'.format(INDENT, arg.name) - yield '' - yield '{}:required: {}'.format(DOUBLEINDENT, arg.required) - if arg.deprecate_info: - yield '{}:deprecated: {}'.format(DOUBLEINDENT, arg.deprecate_info._get_message(arg.deprecate_info)) - short_summary = arg.short_summary or '' - possible_values_index = short_summary.find(' Possible values include') - short_summary = short_summary[0:possible_values_index - if possible_values_index >= 0 else len(short_summary)] - short_summary = short_summary.strip() - yield '{}:summary: {}'.format(DOUBLEINDENT, short_summary) - yield '{}:description: {}'.format(DOUBLEINDENT, arg.long_summary) - if arg.choices: - yield '{}:values: {}'.format(DOUBLEINDENT, ', '.join(sorted([str(x) for x in arg.choices]))) - if arg.default and arg.default != argparse.SUPPRESS: - try: - if arg.default.startswith(USER_HOME): - arg.default = arg.default.replace(USER_HOME, '~').replace('\\', '/') - except Exception: - pass - try: - arg.default = arg.default.replace("\\", "\\\\") - except Exception: - pass - yield '{}:default: {}'.format(DOUBLEINDENT, arg.default) - if arg.value_sources: - yield '{}:source: {}'.format(DOUBLEINDENT, ', '.join(_get_populator_commands(arg))) - yield '' - yield '' - if len(help_file.examples) > 0: - for e in help_file.examples: - fields = _get_example_fields(e) - yield '{}.. cliexample:: {}'.format(INDENT, fields['summary']) - yield '' - yield DOUBLEINDENT + fields['command'].replace("\\", "\\\\") - yield '' - - def run(self): - node = nodes.section() - node.document = self.state.document - result = ViewList() - for line in self.make_rst(): - result.append(line, '') - - nested_parse_with_titles(self.state, result, node) - return node.children - -def setup(app): - app.add_directive('azhelpgen', AzHelpGenDirective) - - -def _store_parsers(parser, parser_keys, parser_values, sub_parser_keys, sub_parser_values): - for s in parser.subparsers.values(): - parser_keys.append(_get_parser_name(s)) - parser_values.append(s) - if _is_group(s): - for c in s.choices.values(): - sub_parser_keys.append(_get_parser_name(c)) - sub_parser_values.append(c) - _store_parsers(c, parser_keys, parser_values, sub_parser_keys, sub_parser_values) - -def _is_group(parser): - return getattr(parser, '_subparsers', None) is not None \ - or getattr(parser, 'choices', None) is not None - -def _get_parser_name(s): - return (s._prog_prefix if hasattr(s, '_prog_prefix') else s.prog)[3:] - - -def _get_populator_commands(param): - commands = [] - for value_source in param.value_sources: - try: - commands.append(value_source["link"]["command"]) - except TypeError: # old value_sources are strings - commands.append(value_source) - except KeyError: # new value_sources are dicts - continue - return commands - -def _get_example_fields(ex): - res = {} - try: - res['summary'] = ex.short_summary - res['command'] = ex.command - except AttributeError: - res['summary'] = ex.name - res['command'] = ex.text - - return res \ No newline at end of file diff --git a/src/scripts/refdoc/cligroup/__init__.py b/src/scripts/refdoc/cligroup/__init__.py deleted file mode 100644 index 34913fb394d..00000000000 --- a/src/scripts/refdoc/cligroup/__init__.py +++ /dev/null @@ -1,4 +0,0 @@ -# -------------------------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. 
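For orientation, the refdoc pieces connect through Sphinx's extension hooks: conf.py (below) lists 'azhelpgen.azhelpgen' and 'cligroup.cligroup' in extensions, so Sphinx imports each module and calls its setup(app), registering the directives that ind.rst and the generated RST use. A stripped-down directive following the same pattern (names are illustrative):

    from docutils import nodes
    from docutils.parsers.rst import Directive

    class HelloDirective(Directive):
        def run(self):
            # Return a list of docutils nodes; azhelpgen instead builds RST text
            # and parses it with nested_parse_with_titles, returning node.children.
            return [nodes.paragraph(text='hello from a custom directive')]

    def setup(app):
        app.add_directive('hello', HelloDirective)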
-# --------------------------------------------------------------------------------------------
diff --git a/src/scripts/refdoc/cligroup/cligroup.py b/src/scripts/refdoc/cligroup/cligroup.py
deleted file mode 100644
index 49d5450f5bc..00000000000
--- a/src/scripts/refdoc/cligroup/cligroup.py
+++ /dev/null
@@ -1,73 +0,0 @@
-# --------------------------------------------------------------------------------------------
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License. See License.txt in the project root for license information.
-# --------------------------------------------------------------------------------------------
-import copy
-from docutils import nodes
-from sphinx import addnodes
-from sphinx.directives import ObjectDescription
-from docutils.parsers.rst import Directive
-from sphinx.util.docfields import Field
-
-cli_field_types = [
-    Field('summary', label='Summary', has_arg=False,
-          names=('summary', 'shortdesc')),
-    Field('description', label='Description', has_arg=False,
-          names=('description', 'desc', 'longdesc'))
-    ]
-
-class CliBaseDirective(ObjectDescription):
-    def handle_signature(self, sig, signode):
-        signode += addnodes.desc_addname(sig, sig)
-        return sig
-
-    def needs_arglist(self):
-        return False
-
-    def add_target_and_index(self, name, sig, signode):
-        signode['ids'].append(name)
-
-    def get_index_text(self, modname, name):
-        return name
-
-class CliGroupDirective(CliBaseDirective):
-    doc_field_types = copy.copy(cli_field_types)
-    doc_field_types.extend([
-        Field('docsource', label='Doc Source', has_arg=False,
-              names=('docsource', 'documentsource')),
-        # ('deprecated') without a trailing comma is a plain string, not a tuple
-        Field('deprecated', label='Deprecated', has_arg=False,
-              names=('deprecated',))
-    ])
-
-class CliCommandDirective(CliBaseDirective):
-    doc_field_types = copy.copy(cli_field_types)
-    doc_field_types.extend([
-        Field('docsource', label='Doc Source', has_arg=False,
-              names=('docsource', 'documentsource')),
-        Field('deprecated', label='Deprecated', has_arg=False,
-              names=('deprecated',))
-    ])
-
-class CliArgumentDirective(CliBaseDirective):
-    doc_field_types = copy.copy(cli_field_types)
-    doc_field_types.extend([
-        Field('required', label='Required', has_arg=False,
-              names=('required',)),
-        Field('values', label='Allowed values', has_arg=False,
-              names=('values', 'choices', 'options')),
-        Field('default', label='Default value', has_arg=False,
-              names=('default',)),
-        Field('source', label='Values from', has_arg=False,
-              names=('source', 'sources')),
-        Field('deprecated', label='Deprecated', has_arg=False,
-              names=('deprecated',))
-    ])
-
-class CliExampleDirective(CliBaseDirective):
-    pass
-
-def setup(app):
-    app.add_directive('cligroup', CliGroupDirective)
-    app.add_directive('clicommand', CliCommandDirective)
-    app.add_directive('cliarg', CliArgumentDirective)
-    app.add_directive('cliexample', CliExampleDirective)
diff --git a/src/scripts/refdoc/conf.py b/src/scripts/refdoc/conf.py
deleted file mode 100644
index dd4c41d7d8f..00000000000
--- a/src/scripts/refdoc/conf.py
+++ /dev/null
@@ -1,42 +0,0 @@
-#!/usr/bin/env python3
-# --------------------------------------------------------------------------------------------
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License. See License.txt in the project root for license information.
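The names=('deprecated',) fixes above deserve a note: without the trailing comma the parentheses are plain grouping, so ('deprecated') is just a string. The original form only appeared to work because `in` on a string performs a substring test. A quick demonstration:

    # The comma, not the parentheses, makes a one-element tuple.
    assert ('deprecated') == 'deprecated'
    assert isinstance(('deprecated',), tuple)
    assert 'deprecated' in 'deprecated'      # substring match on a str
    assert 'deprecated' in ('deprecated',)   # membership test on a tuple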
-# -------------------------------------------------------------------------------------------- -# -*- coding: utf-8 -*- - -import os -import sys -sys.path.insert(0, os.getcwd()) - -# For more information on all config options, see http://www.sphinx-doc.org/en/stable/config.html - -extensions = [ - 'sphinx.ext.doctest', - 'sphinx.ext.coverage', - 'sphinx.ext.ifconfig', - 'sphinx.ext.viewcode', - 'sphinx.ext.autodoc', - 'cligroup.cligroup', - 'azhelpgen.azhelpgen' -] - -# The file name extension for the sphinx source files. -source_suffix = '.rst' -# The master toctree document. -master_doc = 'ind' - -# General information about the project. -project = 'az' -copyright = '2018, msft' -author = 'msft' - -# The language for content autogenerated by Sphinx -language = None - -# List of patterns, relative to source directory, that match files and -# directories to ignore when looking for source files. -exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store'] - -# Disable smartquotes to keep the document just the same as that in _help.py -smartquotes = False diff --git a/src/scripts/refdoc/generate.py b/src/scripts/refdoc/generate.py deleted file mode 100644 index ae5778ae142..00000000000 --- a/src/scripts/refdoc/generate.py +++ /dev/null @@ -1,80 +0,0 @@ -# -------------------------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# -------------------------------------------------------------------------------------------- - -from __future__ import print_function - -import os -import sys -import copy -import shutil -import argparse -import tempfile -import datetime -from subprocess import check_call, CalledProcessError - - -ENV_KEY_AZURE_EXTENSION_DIR = 'AZURE_EXTENSION_DIR' - -def print_status(msg=''): - print('-- '+msg) - - -def generate(ext_file, output_dir): - # Verify sphinx installed in environment before we get started - check_call(['sphinx-build', '--version']) - if not output_dir: - output_dir = tempfile.mkdtemp(prefix='ref-doc-out-', dir=os.getcwd()) - print_status('Using output directory {}'.format(output_dir)) - temp_extension_dir = tempfile.mkdtemp() - try: - pip_cmd = [sys.executable, '-m', 'pip', 'install', '--target', os.path.join(temp_extension_dir, 'extension'), - ext_file, '--disable-pip-version-check', '--no-cache-dir'] - print_status('Executing "{}"'.format(' '.join(pip_cmd))) - check_call(pip_cmd) - sphinx_cmd = ['sphinx-build', '-b', 'xml', os.path.dirname(os.path.realpath(__file__)), output_dir] - env = copy.copy(os.environ) - env[ENV_KEY_AZURE_EXTENSION_DIR] = temp_extension_dir - print_status('Executing "{}" with {} set to {}'.format(' '.join(sphinx_cmd), - ENV_KEY_AZURE_EXTENSION_DIR, - env['AZURE_EXTENSION_DIR'])) - check_call(sphinx_cmd, env=env) - finally: - shutil.rmtree(temp_extension_dir) - print_status('Cleaned up temp directory {}'.format(temp_extension_dir)) - print_status('Ref doc output available at {}'.format(output_dir)) - print_status('Done.') - - -def _type_ext_file(val): - ext_file = os.path.realpath(os.path.expanduser(val)) - if os.path.isdir(ext_file): - raise argparse.ArgumentTypeError('{} is a directory not an extension file.'.format(ext_file)) - if not os.path.isfile(ext_file): - raise argparse.ArgumentTypeError('{} does not exist.'.format(ext_file)) - if os.path.splitext(ext_file)[1] != '.whl': - raise argparse.ArgumentTypeError('{} Extension files should end with 
.whl'.format(ext_file)) - return ext_file - - -def _type_path(val): - out_path = os.path.realpath(os.path.expanduser(val)) - if not os.path.isdir(out_path): - raise argparse.ArgumentTypeError('{} is not a directory. Create it or specify different directory.'.format(out_path)) - if os.listdir(out_path): - raise argparse.ArgumentTypeError('{} is not empty. Empty output directory required.'.format(out_path)) - return out_path - - -# A small command line interface for the script -if __name__ == '__main__': - parser = argparse.ArgumentParser(description='Script to generate reference documentation for a single Azure CLI extension.') - - parser.add_argument('-e', '--extension-file', dest='ext_file', - help='Path to the extension .whl file.', required=True, type=_type_ext_file) - parser.add_argument('-o', '--output-dir', dest='output_dir', - help='Path to place the generated documentation. By default, a temporary directory will be created.', required=False, type=_type_path) - - args = parser.parse_args() - generate(args.ext_file, args.output_dir) diff --git a/src/scripts/refdoc/ind.rst b/src/scripts/refdoc/ind.rst deleted file mode 100644 index 5fd5f988675..00000000000 --- a/src/scripts/refdoc/ind.rst +++ /dev/null @@ -1,2 +0,0 @@ -.. azhelpgen:: - \ No newline at end of file diff --git a/src/scripts/run_az.py b/src/scripts/run_az.py deleted file mode 100644 index 9575640951c..00000000000 --- a/src/scripts/run_az.py +++ /dev/null @@ -1,10 +0,0 @@ -# -------------------------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# -------------------------------------------------------------------------------------------- - -import sys - -from azure.cli import __main__ as cli_main - -sys.exit(cli_main(sys.argv))
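Putting the refdoc pieces together, a typical run looks like this; the wheel path is hypothetical, and generate.py creates its own temp output directory when -o is omitted:

    # Build XML reference docs for one extension wheel.
    import subprocess
    import sys
    import tempfile

    out_dir = tempfile.mkdtemp()  # -o requires an existing, empty directory
    subprocess.check_call([sys.executable, 'src/scripts/refdoc/generate.py',
                           '-e', 'dist/azext_demo-0.1.0-py3-none-any.whl',
                           '-o', out_dir])
    # sphinx-build -b xml writes the result to out_dir/ind.xml ('ind' is the master_doc)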