From 3b125dfbc3260c437df0fc94c3e4ef2af86f4e64 Mon Sep 17 00:00:00 2001 From: Andy Churchard Date: Fri, 30 Jun 2023 10:10:46 +0100 Subject: [PATCH 1/7] Use constants directly, no need to assign to instance variables --- .../azext_aosm/generate_nfd/vnf_nfd_generator.py | 16 +++++++--------- 1 file changed, 7 insertions(+), 9 deletions(-) diff --git a/src/aosm/azext_aosm/generate_nfd/vnf_nfd_generator.py b/src/aosm/azext_aosm/generate_nfd/vnf_nfd_generator.py index ec2d44a1214..a70a64d4bb6 100644 --- a/src/aosm/azext_aosm/generate_nfd/vnf_nfd_generator.py +++ b/src/aosm/azext_aosm/generate_nfd/vnf_nfd_generator.py @@ -62,17 +62,15 @@ class VnfNfdGenerator(NFDGenerator): def __init__(self, config: VNFConfiguration, order_params: bool, interactive: bool): self.config = config - self.bicep_template_name = VNF_DEFINITION_BICEP_TEMPLATE_FILENAME - self.manifest_template_name = VNF_MANIFEST_BICEP_TEMPLATE_FILENAME self.arm_template_path = self.config.arm_template.file_path self.output_folder_name = self.config.build_output_folder_name self._bicep_path = os.path.join( - self.output_folder_name, self.bicep_template_name + self.output_folder_name, VNF_DEFINITION_BICEP_TEMPLATE_FILENAME ) self._manifest_path = os.path.join( - self.output_folder_name, self.manifest_template_name + self.output_folder_name, VNF_MANIFEST_BICEP_TEMPLATE_FILENAME ) self.order_params = order_params self.interactive = interactive @@ -180,10 +178,10 @@ def write_deployment_parameters(self, folder_path: str) -> None: for key in vm_parameters: if key == self.config.image_name_parameter: - # There is only one correct answer for the image name, so don't ask the + # There is only one correct answer for the image name, so don't ask the # user, instead it is hardcoded in config mappings. continue - + # Order parameters into those without and then with defaults has_default_field = "defaultValue" in self.vm_parameters[key] has_default = ( @@ -245,7 +243,7 @@ def write_deployment_parameters(self, folder_path: str) -> None: f"{OPTIONAL_DEPLOYMENT_PARAMETERS_FILENAME} to help you choose which " "to expose." ) - + def write_template_parameters(self, folder_path: str) -> None: """ Write out the NFD templateParameters.json file. 
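A small illustration of the parameter-ordering step referenced in the hunk above: write_deployment_parameters splits the ARM template parameters into those without a defaultValue (required) and those with one (optional), and emits the required ones first. This sketch is not part of the patch; the parameter names and values are invented examples.

    from typing import Any, Dict

    vm_parameters: Dict[str, Dict[str, Any]] = {
        "vmName": {"type": "string"},
        "vmSize": {"type": "string", "defaultValue": "Standard_D2s_v3"},
        "sshPublicKey": {"type": "string"},
    }

    # Required (no defaultValue) parameters first, optional ones afterwards.
    without_default = {k: v for k, v in vm_parameters.items() if "defaultValue" not in v}
    with_default = {k: v for k, v in vm_parameters.items() if "defaultValue" in v}
    ordered = {**without_default, **with_default}

    print(list(ordered))  # ['vmName', 'sshPublicKey', 'vmSize']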
@@ -306,10 +304,10 @@ def copy_to_output_folder(self) -> None: logger.info("Create NFD bicep %s", self.output_folder_name) os.mkdir(self.output_folder_name) - bicep_path = os.path.join(code_dir, "templates", self.bicep_template_name) + bicep_path = os.path.join(code_dir, "templates", VNF_DEFINITION_BICEP_TEMPLATE_FILENAME) shutil.copy(bicep_path, self.output_folder_name) - manifest_path = os.path.join(code_dir, "templates", self.manifest_template_name) + manifest_path = os.path.join(code_dir, "templates", VNF_MANIFEST_BICEP_TEMPLATE_FILENAME) shutil.copy(manifest_path, self.output_folder_name) # Copy everything in the temp folder to the output folder shutil.copytree( From 345546070e4bdaea975b5a2399fd66a2d14f9ad9 Mon Sep 17 00:00:00 2001 From: Andy Churchard Date: Fri, 30 Jun 2023 14:36:57 +0100 Subject: [PATCH 2/7] Refactor VNF file creation and copying --- src/aosm/azext_aosm/_configuration.py | 6 +- src/aosm/azext_aosm/custom.py | 11 +- src/aosm/azext_aosm/deploy/deploy_with_arm.py | 2 +- .../generate_nfd/vnf_nfd_generator.py | 111 ++++++++---------- 4 files changed, 62 insertions(+), 68 deletions(-) diff --git a/src/aosm/azext_aosm/_configuration.py b/src/aosm/azext_aosm/_configuration.py index 4f40ad21917..2cfd09c7ab8 100644 --- a/src/aosm/azext_aosm/_configuration.py +++ b/src/aosm/azext_aosm/_configuration.py @@ -318,10 +318,10 @@ def sa_manifest_name(self) -> str: return f"{sanitized_nf_name}-sa-manifest-{self.version.replace('.', '-')}" @property - def build_output_folder_name(self) -> str: + def output_directory_for_build(self) -> Path: """Return the local folder for generating the bicep template to.""" - arm_template_path = self.arm_template.file_path - return f"{NF_DEFINITION_OUTPUT_BICEP_PREFIX}{Path(str(arm_template_path)).stem}" + arm_template_name = Path(self.arm_template.file_path).stem + return Path(f"{NF_DEFINITION_OUTPUT_BICEP_PREFIX}{arm_template_name}") @dataclass diff --git a/src/aosm/azext_aosm/custom.py b/src/aosm/azext_aosm/custom.py index 78b058c8859..297d7dc4705 100644 --- a/src/aosm/azext_aosm/custom.py +++ b/src/aosm/azext_aosm/custom.py @@ -7,6 +7,7 @@ import os import shutil from dataclasses import asdict +from pathlib import Path from typing import Optional from azure.cli.core.azclierror import ( @@ -116,15 +117,15 @@ def _generate_nfd( "Generate NFD called for unrecognised definition_type. Only VNF and CNF" " have been implemented." ) - if nfd_generator.bicep_path: + if nfd_generator.vnfd_bicep_path: carry_on = input( - f"The folder {os.path.dirname(nfd_generator.bicep_path)} already exists -" + f"The {nfd_generator.vnfd_bicep_path.parent} directory already exists -" " delete it and continue? (y/n)" ) if carry_on != "y": - raise UnclassifiedUserFault("User aborted! ") + raise UnclassifiedUserFault("User aborted!") - shutil.rmtree(os.path.dirname(nfd_generator.bicep_path)) + shutil.rmtree(nfd_generator.vnfd_bicep_path.parent) nfd_generator.generate_nfd() @@ -249,7 +250,7 @@ def _generate_config(configuration_type: str, output_file: str = "input.json"): config = get_configuration(configuration_type) config_as_dict = json.dumps(asdict(config), indent=4) - if os.path.exists(output_file): + if Path(output_file).exists(): carry_on = input( f"The file {output_file} already exists - do you want to overwrite it?" 
" (y/n)" diff --git a/src/aosm/azext_aosm/deploy/deploy_with_arm.py b/src/aosm/azext_aosm/deploy/deploy_with_arm.py index 36d0b76979e..c9ade56cfe0 100644 --- a/src/aosm/azext_aosm/deploy/deploy_with_arm.py +++ b/src/aosm/azext_aosm/deploy/deploy_with_arm.py @@ -113,7 +113,7 @@ def deploy_vnfd_from_bicep( # User has not passed in a bicep template, so we are deploying the default # one produced from building the NFDV using this CLI bicep_path = os.path.join( - self.config.build_output_folder_name, + self.config.output_directory_for_build, VNF_DEFINITION_BICEP_TEMPLATE_FILENAME, ) diff --git a/src/aosm/azext_aosm/generate_nfd/vnf_nfd_generator.py b/src/aosm/azext_aosm/generate_nfd/vnf_nfd_generator.py index a70a64d4bb6..ddea2a21b19 100644 --- a/src/aosm/azext_aosm/generate_nfd/vnf_nfd_generator.py +++ b/src/aosm/azext_aosm/generate_nfd/vnf_nfd_generator.py @@ -9,6 +9,7 @@ import shutil import tempfile from functools import cached_property +from pathlib import Path from typing import Any, Dict, Optional from knack.log import get_logger @@ -63,18 +64,14 @@ class VnfNfdGenerator(NFDGenerator): def __init__(self, config: VNFConfiguration, order_params: bool, interactive: bool): self.config = config - self.arm_template_path = self.config.arm_template.file_path - self.output_folder_name = self.config.build_output_folder_name + self.arm_template_path = Path(self.config.arm_template.file_path) + self.output_directory: Path = self.config.output_directory_for_build - self._bicep_path = os.path.join( - self.output_folder_name, VNF_DEFINITION_BICEP_TEMPLATE_FILENAME - ) - self._manifest_path = os.path.join( - self.output_folder_name, VNF_MANIFEST_BICEP_TEMPLATE_FILENAME - ) + self._vnfd_bicep_path = Path(self.output_directory, VNF_DEFINITION_BICEP_TEMPLATE_FILENAME) + self._manifest_bicep_path = Path(self.output_directory, VNF_MANIFEST_BICEP_TEMPLATE_FILENAME) self.order_params = order_params self.interactive = interactive - self.tmp_folder_name = "" + self._tmp_dir: Optional[Path] = None self.image_name = f"{self.config.nf_name}Image" def generate_nfd(self) -> None: @@ -83,32 +80,30 @@ def generate_nfd(self) -> None: Create a bicep template for an NFD from the ARM template for the VNF. """ - # Create temporary folder. + # Create temporary directory. with tempfile.TemporaryDirectory() as tmpdirname: - self.tmp_folder_name = tmpdirname + self._tmp_dir = Path(tmpdirname) - self.create_parameter_files() - self.copy_to_output_folder() - print(f"Generated NFD bicep templates created in {self.output_folder_name}") + self._create_parameter_files() + self._copy_to_output_directory() + print(f"Generated NFD bicep templates created in {self.output_directory}") print( "Please review these templates. When you are happy with them run " "`az aosm nfd publish` with the same arguments." 
) @property - def bicep_path(self) -> Optional[str]: + def vnfd_bicep_path(self) -> Optional[Path]: """Returns the path to the bicep file for the NFD if it has been created.""" - if os.path.exists(self._bicep_path): - return self._bicep_path - + if self._vnfd_bicep_path.exists(): + return self._vnfd_bicep_path return None @property - def manifest_path(self) -> Optional[str]: + def manifest_bicep_path(self) -> Optional[str]: """Returns the path to the bicep file for the NFD if it has been created.""" - if os.path.exists(self._manifest_path): - return self._manifest_path - + if self._manifest_bicep_path.exists(): + return self._manifest_bicep_path return None @cached_property @@ -149,22 +144,22 @@ def vm_parameters_ordered(self) -> Dict[str, Any]: return {**vm_parameters_no_default, **vm_parameters_with_default} - def create_parameter_files(self) -> None: - """Create the Deployment and Template json parameter files.""" - schemas_folder_path = os.path.join(self.tmp_folder_name, SCHEMAS_DIR_NAME) - os.mkdir(schemas_folder_path) - self.write_deployment_parameters(schemas_folder_path) + def _create_parameter_files(self) -> None: + """Create the deployment, template and VHD parameter files.""" + tmp_schemas_directory: Path = self._tmp_dir / SCHEMAS_DIR_NAME + tmp_schemas_directory.mkdir() + self.write_deployment_parameters(tmp_schemas_directory) - mappings_folder_path = os.path.join(self.tmp_folder_name, CONFIG_MAPPINGS_DIR_NAME) - os.mkdir(mappings_folder_path) - self.write_template_parameters(mappings_folder_path) - self.write_vhd_parameters(mappings_folder_path) + tmp_mappings_directory: Path = self._tmp_dir / CONFIG_MAPPINGS_DIR_NAME + tmp_mappings_directory.mkdir() + self.write_template_parameters(tmp_mappings_directory) + self.write_vhd_parameters(tmp_mappings_directory) - def write_deployment_parameters(self, folder_path: str) -> None: + def write_deployment_parameters(self, directory: Path) -> None: """ - Write out the NFD deploymentParameters.json file. + Write out the NFD deploymentParameters.json file to `directory` - :param folder_path: The folder to put this file in. + :param directory: The directory to put this file in. """ logger.debug("Create deploymentParameters.json") @@ -211,7 +206,7 @@ def write_deployment_parameters(self, folder_path: str) -> None: for key in vm_parameters_to_exclude: self.vm_parameters.pop(key, None) - deployment_parameters_path = os.path.join(folder_path, DEPLOYMENT_PARAMETERS_FILENAME) + deployment_parameters_path = directory / DEPLOYMENT_PARAMETERS_FILENAME # Heading for the deployParameters schema deploy_parameters_full: Dict[str, Any] = SCHEMA_PREFIX @@ -230,9 +225,7 @@ def write_deployment_parameters(self, folder_path: str) -> None: # Extra output file to help the user know which parameters are optional if not self.interactive: if nfd_parameters_with_default: - optional_deployment_parameters_path = os.path.join( - folder_path, OPTIONAL_DEPLOYMENT_PARAMETERS_FILENAME - ) + optional_deployment_parameters_path = directory / OPTIONAL_DEPLOYMENT_PARAMETERS_FILENAME with open( optional_deployment_parameters_path, "w", encoding="utf-8" ) as _file: @@ -244,11 +237,11 @@ def write_deployment_parameters(self, folder_path: str) -> None: "to expose." ) - def write_template_parameters(self, folder_path: str) -> None: + def write_template_parameters(self, directory: Path) -> None: """ - Write out the NFD templateParameters.json file. + Write out the NFD templateParameters.json file to `directory`. - :param folder_path: The folder to put this file in. 
+ :param directory: The directory to put this file in. """ logger.debug("Create %s", TEMPLATE_PARAMETERS_FILENAME) vm_parameters = ( @@ -264,18 +257,18 @@ def write_template_parameters(self, folder_path: str) -> None: template_parameters[key] = f"{{deployParameters.{key}}}" - template_parameters_path = os.path.join(folder_path, TEMPLATE_PARAMETERS_FILENAME) + template_parameters_path = directory / TEMPLATE_PARAMETERS_FILENAME with open(template_parameters_path, "w", encoding="utf-8") as _file: _file.write(json.dumps(template_parameters, indent=4)) logger.debug("%s created", template_parameters_path) - def write_vhd_parameters(self, folder_path: str) -> None: + def write_vhd_parameters(self, directory: Path) -> None: """ - Write out the NFD vhdParameters.json file. + Write out the NFD vhdParameters.json file to `directory`. - :param folder_path: The folder to put this file in. + :param directory: The directory to put this file in. """ azureDeployLocation: str if self.vm_parameters.get("location"): @@ -291,29 +284,29 @@ def write_vhd_parameters(self, folder_path: str) -> None: "azureDeployLocation": azureDeployLocation, } - vhd_parameters_path = os.path.join(folder_path, VHD_PARAMETERS_FILENAME) + vhd_parameters_path = directory / VHD_PARAMETERS_FILENAME with open(vhd_parameters_path, "w", encoding="utf-8") as _file: _file.write(json.dumps(vhd_parameters, indent=4)) logger.debug("%s created", vhd_parameters_path) - def copy_to_output_folder(self) -> None: - """Copy the bicep templates, config mappings and schema into the build output folder.""" - code_dir = os.path.dirname(__file__) + def _copy_to_output_directory(self) -> None: + """Copy the static bicep templates and generated config mappings and schema into the build output directory.""" + logger.info("Create NFD bicep %s", self.output_directory) + Path(self.output_directory).mkdir(exist_ok=True) - logger.info("Create NFD bicep %s", self.output_folder_name) - os.mkdir(self.output_folder_name) + static_bicep_templates_dir = Path(__file__).parent / "templates" - bicep_path = os.path.join(code_dir, "templates", VNF_DEFINITION_BICEP_TEMPLATE_FILENAME) - shutil.copy(bicep_path, self.output_folder_name) + static_vnfd_bicep_path = static_bicep_templates_dir / VNF_DEFINITION_BICEP_TEMPLATE_FILENAME + shutil.copy(static_vnfd_bicep_path, self.output_directory) - manifest_path = os.path.join(code_dir, "templates", VNF_MANIFEST_BICEP_TEMPLATE_FILENAME) - shutil.copy(manifest_path, self.output_folder_name) - # Copy everything in the temp folder to the output folder + static_manifest_bicep_path = static_bicep_templates_dir / VNF_MANIFEST_BICEP_TEMPLATE_FILENAME + shutil.copy(static_manifest_bicep_path, self.output_directory) + # Copy everything in the temp directory to the output directory shutil.copytree( - self.tmp_folder_name, - self.output_folder_name, + self._tmp_dir, + self.output_directory, dirs_exist_ok=True, ) - logger.info("Copied files to %s", self.output_folder_name) + logger.info("Copied files to %s", self.output_directory) From 532d859737134b44224ec86b266cb5558720a743 Mon Sep 17 00:00:00 2001 From: Sunny Carter Date: Fri, 30 Jun 2023 17:08:49 +0100 Subject: [PATCH 3/7] Sundry fixes for CNF quickstart --- src/aosm/HISTORY.rst | 7 + src/aosm/azext_aosm/_configuration.py | 29 +- src/aosm/azext_aosm/_params.py | 9 +- src/aosm/azext_aosm/custom.py | 10 +- src/aosm/azext_aosm/deploy/artifact.py | 33 ++- src/aosm/azext_aosm/deploy/deploy_with_arm.py | 264 +++++++++++------- src/aosm/azext_aosm/deploy/pre_deploy.py | 34 ++- 
.../generate_nfd/cnf_nfd_generator.py | 156 ++++++----- .../azext_aosm/generate_nsd/nsd_generator.py | 46 ++- ...nf_template.bicep => nf_template.bicep.j2} | 25 +- ...d_template.bicep => nsd_template.bicep.j2} | 7 +- src/aosm/azext_aosm/util/constants.py | 17 +- 12 files changed, 406 insertions(+), 231 deletions(-) rename src/aosm/azext_aosm/generate_nsd/templates/{nf_template.bicep => nf_template.bicep.j2} (79%) rename src/aosm/azext_aosm/generate_nsd/templates/{nsd_template.bicep => nsd_template.bicep.j2} (92%) diff --git a/src/aosm/HISTORY.rst b/src/aosm/HISTORY.rst index 836fcf03607..ae1c3842448 100644 --- a/src/aosm/HISTORY.rst +++ b/src/aosm/HISTORY.rst @@ -9,6 +9,13 @@ unreleased * NFDV version exposed as a CGV on an SNS. * `az aosm nfd publish` option added for `--definition-type cnf` to publish the CNF bicep templates, upload helm charts from disk to the ACR and copy the images from a source ACR to the target ACR. * Managed Identity added to VNF NF templates - requires subscription to be registered for the feature flag. +* Various fixes to NFD build of deployParameters schema and interactive mode create of deployParameters mappings file. +* Fix CNF NFD publish so that it doesn't render the ACR unuseable for future Artifact publishing. +* Allow CNF NFD image copy from a source ACR using a namespace. +* Fix - Add new CGSchema parameters not from the NFD to the `required` section of the schema. +* Add the ability to skip bicep publish or artifact upload during publish commands. +* Fix Manifest name for NSDs so it isn't the same as that for NFDs +* Add validation of source_registry_id format for CNF configuration 0.2.0 ++++++ diff --git a/src/aosm/azext_aosm/_configuration.py b/src/aosm/azext_aosm/_configuration.py index 23de58ce64d..1c41cf562d0 100644 --- a/src/aosm/azext_aosm/_configuration.py +++ b/src/aosm/azext_aosm/_configuration.py @@ -2,6 +2,7 @@ # pylint: disable=simplifiable-condition import os +import re from dataclasses import dataclass, field from pathlib import Path from typing import Any, Dict, List, Optional @@ -15,6 +16,7 @@ NSD, NSD_DEFINITION_OUTPUT_BICEP_PREFIX, VNF, + SOURCE_ACR_REGEX ) DESCRIPTION_MAP: Dict[str, str] = { @@ -74,6 +76,12 @@ "source_registry_id": "Resource ID of the source acr registry from which to pull " "the image", + "source_registry_namespace": + "Optional. Namespace of the repository of the source acr registry from which " + "to pull. For example if your repository is samples/prod/nginx then set this to" + " samples/prod . Leave blank if the image is in the root namespace." 
+ "See https://learn.microsoft.com/en-us/azure/container-registry/" + "container-registry-best-practices#repository-namespaces for further details.", } @@ -197,7 +205,9 @@ def network_function_name(self) -> str: def acr_manifest_name(self) -> str: """Return the ACR manifest name from the NFD name.""" sanitised_nf_name = self.network_function_name.lower().replace('_', '-') - return f"{sanitised_nf_name}-acr-manifest-{self.nsd_version.replace('.', '-')}" + return ( + f"{sanitised_nf_name}-nsd-acr-manifest-{self.nsd_version.replace('.', '-')}" + ) @property def nfvi_site_name(self) -> str: @@ -222,7 +232,7 @@ def arm_template(self) -> ArtifactConfig: @property def arm_template_artifact_name(self) -> str: """Return the artifact name for the ARM template.""" - return f"{self.network_function_definition_group_name}_nfd_artifact" + return f"{self.network_function_definition_group_name}-nfd-artifact" @dataclass @@ -310,6 +320,7 @@ class HelmPackageConfig: @dataclass class CNFConfiguration(NFConfiguration): source_registry_id: str = DESCRIPTION_MAP["source_registry_id"] + source_registry_namespace: str = DESCRIPTION_MAP["source_registry_namespace"] helm_packages: List[Any] = field(default_factory=lambda: [HelmPackageConfig()]) def __post_init__(self): @@ -327,6 +338,20 @@ def build_output_folder_name(self) -> str: """Return the local folder for generating the bicep template to.""" return f"{DEFINITION_OUTPUT_BICEP_PREFIX}{self.nf_name}" + def validate(self): + """Validate the CNF config + + :raises ValidationError: If source registry ID doesn't match the regex + """ + if self.source_registry_id == DESCRIPTION_MAP["source_registry_id"]: + # Config has not been filled in. Don't validate. + return + + source_registry_match = re.search(SOURCE_ACR_REGEX, self.source_registry_id) + if not source_registry_match or len(source_registry_match.groups()) < 2: + raise ValidationError( + "CNF config has an invalid source registry ID. Please run `az aosm " + "nfd generate-config` to see the valid formats.") def get_configuration( configuration_type: str, config_as_dict: Optional[Dict[Any, Any]] = None diff --git a/src/aosm/azext_aosm/_params.py b/src/aosm/azext_aosm/_params.py index b7e15796e0f..ab21d6348a5 100644 --- a/src/aosm/azext_aosm/_params.py +++ b/src/aosm/azext_aosm/_params.py @@ -7,7 +7,7 @@ from argcomplete.completers import FilesCompleter from azure.cli.core import AzCommandsLoader -from .util.constants import CNF, VNF +from .util.constants import CNF, VNF, BICEP_PUBLISH, ARTIFACT_UPLOAD def load_arguments(self: AzCommandsLoader, _): @@ -18,6 +18,7 @@ def load_arguments(self: AzCommandsLoader, _): ) definition_type = get_enum_type([VNF, CNF]) + skip_steps = get_enum_type([BICEP_PUBLISH, ARTIFACT_UPLOAD]) # Set the argument context so these options are only available when this specific command # is called. @@ -89,6 +90,9 @@ def load_arguments(self: AzCommandsLoader, _): completer=FilesCompleter(allowednames="*.json"), help="Optional path to a parameters file for the manifest definition file. 
Use to override publish of the built definition and config with alternative parameters.", ) + c.argument( + "skip", arg_type=skip_steps, help="Optional skip steps" + ) with self.argument_context("aosm nsd") as c: c.argument( @@ -98,3 +102,6 @@ def load_arguments(self: AzCommandsLoader, _): completer=FilesCompleter(allowednames="*.json"), help="The path to the configuration file.", ) + c.argument( + "skip", arg_type=skip_steps, help="Optional skip steps" + ) diff --git a/src/aosm/azext_aosm/custom.py b/src/aosm/azext_aosm/custom.py index 6885a31276d..ff45db84666 100644 --- a/src/aosm/azext_aosm/custom.py +++ b/src/aosm/azext_aosm/custom.py @@ -134,6 +134,7 @@ def publish_definition( parameters_json_file: Optional[str] = None, manifest_file: Optional[str] = None, manifest_parameters_json_file: Optional[str] = None, + skip: Optional[str] = None, ): """ Publish a generated definition. @@ -153,6 +154,7 @@ def publish_definition( manifests :param manifest_parameters_json_file: Optional path to an override bicep parameters file for manifest parameters + :param skip: options to skip, either publish bicep or upload artifacts """ print("Publishing definition.") api_clients = ApiClients( @@ -172,6 +174,7 @@ def publish_definition( parameters_json_file=parameters_json_file, manifest_bicep_path=manifest_file, manifest_parameters_json_file=manifest_parameters_json_file, + skip=skip ) elif definition_type == CNF: deployer = DeployerViaArm(api_clients, config=config) @@ -181,6 +184,7 @@ def publish_definition( parameters_json_file=parameters_json_file, manifest_bicep_path=manifest_file, manifest_parameters_json_file=manifest_parameters_json_file, + skip=skip ) else: raise ValueError( @@ -219,7 +223,8 @@ def delete_published_definition( delly.delete_nfd(clean=clean) else: raise ValueError( - f"Definition type must be either 'vnf' or 'cnf'. Definition type {definition_type} is not recognised." + "Definition type must be either 'vnf' or 'cnf'. " + f"Definition type {definition_type} is not recognised." ) @@ -320,6 +325,7 @@ def publish_design( parameters_json_file: Optional[str] = None, manifest_file: Optional[str] = None, manifest_parameters_json_file: Optional[str] = None, + skip: Optional[str] = None, ): """ Publish a generated design. 
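The new `skip` argument threaded through publish_definition and publish_design above takes one of the two values added to util/constants.py and gates the two halves of a publish. A condensed sketch of that gating pattern, mirroring the deploy_with_arm.py changes later in this series (the print calls stand in for the real bicep deploy and artifact upload steps):

    from typing import Optional

    BICEP_PUBLISH = "bicep-publish"
    ARTIFACT_UPLOAD = "artifact-upload"

    def publish(skip: Optional[str] = None) -> None:
        if skip != BICEP_PUBLISH:
            # Deploy the definition/design bicep template.
            print("Deploying bicep template")
        else:
            print("Skipping bicep publish")

        if skip == ARTIFACT_UPLOAD:
            print("Skipping artifact upload")
            print("Done")
            return

        # Upload artifacts to the artifact stores.
        print("Uploading artifacts")

    publish()                      # both steps
    publish(skip=BICEP_PUBLISH)    # artifacts only
    publish(skip=ARTIFACT_UPLOAD)  # bicep only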
@@ -337,6 +343,7 @@ def publish_design( manifests :param manifest_parameters_json_file: Optional path to an override bicep parameters file for manifest parameters + :param skip: options to skip, either publish bicep or upload artifacts """ print("Publishing design.") @@ -355,6 +362,7 @@ def publish_design( parameters_json_file=parameters_json_file, manifest_bicep_path=manifest_file, manifest_parameters_json_file=manifest_parameters_json_file, + skip=skip ) diff --git a/src/aosm/azext_aosm/deploy/artifact.py b/src/aosm/azext_aosm/deploy/artifact.py index 905e715693c..441f119bac3 100644 --- a/src/aosm/azext_aosm/deploy/artifact.py +++ b/src/aosm/azext_aosm/deploy/artifact.py @@ -4,19 +4,21 @@ # pylint: disable=unidiomatic-typecheck """A module to handle interacting with artifacts.""" from dataclasses import dataclass -from typing import Union +from typing import Union, List import subprocess from knack.log import get_logger from oras.client import OrasClient +from azure.cli.core.commands import LongRunningOperation +from azure.mgmt.containerregistry import ContainerRegistryManagementClient from azure.storage.blob import BlobClient, BlobType -from azext_aosm._configuration import ArtifactConfig, HelmPackageConfig from azure.mgmt.containerregistry.models import ( ImportImageParameters, ImportSource, ) -from azure.cli.core.commands import LongRunningOperation +from azext_aosm._configuration import ArtifactConfig, HelmPackageConfig + logger = get_logger(__name__) @@ -89,6 +91,12 @@ def _upload_helm_to_acr(self, artifact_config: HelmPackageConfig) -> None: # helm push "$chart_path" "$target_registry" push_command = ["helm", "push", chart_path, target_registry] subprocess.run(push_command, check=True) + + # If we don't logout from the registry, future Artifact uploads to this ACR + # will fail with an UNAUTHORIZED error. There is no az acr logout command, but + # it is a wrapper around docker, so a call to docker logout will work. + logout_command = ["docker", "logout", registry] + subprocess.run(logout_command, check=True) def _upload_to_storage_account(self, artifact_config: ArtifactConfig) -> None: """ @@ -132,12 +140,13 @@ def _upload_to_storage_account(self, artifact_config: ArtifactConfig) -> None: def copy_image( self, cli_ctx, - container_registry_client, - source_registry_id, - source_image, - target_registry_resource_group_name, - target_registry_name, - mode="NoForce", + container_registry_client: ContainerRegistryManagementClient, + source_registry_id: str, + source_image: str, + target_registry_resource_group_name: str, + target_registry_name: str, + target_tags: List[str], + mode: str = "NoForce", ): """ Copy image from one ACR to another. @@ -148,9 +157,10 @@ def copy_image( :param source_image: source image :param target_registry_resource_group_name: target registry resource group name :param target_registry_name: target registry name + :param target_tags: the list of tags to be applied to the imported image + should be of form: namepace/name:tag or name:tag :param mode: mode for import """ - target_tags = [source_image] source = ImportSource(resource_id=source_registry_id, source_image=source_image) @@ -174,7 +184,8 @@ def copy_image( ) except Exception as error: logger.error( - "Failed to import %s to %s. Check if this image exists in the source registry or is already present in the target registry.", + "Failed to import %s to %s. 
Check if this image exists in the source " + "registry or is already present in the target registry.", source_image, target_registry_name, ) diff --git a/src/aosm/azext_aosm/deploy/deploy_with_arm.py b/src/aosm/azext_aosm/deploy/deploy_with_arm.py index 3c7dfa95eb9..da9eb371ac9 100644 --- a/src/aosm/azext_aosm/deploy/deploy_with_arm.py +++ b/src/aosm/azext_aosm/deploy/deploy_with_arm.py @@ -5,6 +5,7 @@ """Contains class for deploying generated definitions using ARM.""" import json import os +import re import shutil import subprocess # noqa import tempfile @@ -25,6 +26,8 @@ from azext_aosm.util.management_clients import ApiClients from azext_aosm.deploy.pre_deploy import PreDeployerViaSDK from azext_aosm.util.constants import ( + ARTIFACT_UPLOAD, + BICEP_PUBLISH, NF_DEFINITION_BICEP_FILE, NSD, NSD_ARTIFACT_MANIFEST_BICEP_FILE, @@ -35,6 +38,7 @@ VNF, VNF_DEFINITION_BICEP_TEMPLATE, VNF_MANIFEST_BICEP_TEMPLATE, + SOURCE_ACR_REGEX, ) from azext_aosm.util.management_clients import ApiClients @@ -87,6 +91,7 @@ def deploy_vnfd_from_bicep( parameters_json_file: Optional[str] = None, manifest_bicep_path: Optional[str] = None, manifest_parameters_json_file: Optional[str] = None, + skip: Optional[str] = None ) -> None: """ Deploy the bicep template defining the VNFD. @@ -106,49 +111,59 @@ def deploy_vnfd_from_bicep( :param manifest_bicep_path: The path to the bicep template of the manifest :manifest_parameters_json_file: path to an override file of set parameters for the manifest + :param skip: options to skip, either publish bicep or upload artifacts """ assert isinstance(self.config, VNFConfiguration) - if not bicep_path: - # User has not passed in a bicep template, so we are deploying the default - # one produced from building the NFDV using this CLI - bicep_path = os.path.join( - self.config.build_output_folder_name, - VNF_DEFINITION_BICEP_TEMPLATE, - ) + if not skip == BICEP_PUBLISH: + if not bicep_path: + # User has not passed in a bicep template, so we are deploying the default + # one produced from building the NFDV using this CLI + bicep_path = os.path.join( + self.config.build_output_folder_name, + VNF_DEFINITION_BICEP_TEMPLATE, + ) - if parameters_json_file: - parameters = self.read_parameters_from_file(parameters_json_file) + if parameters_json_file: + parameters = self.read_parameters_from_file(parameters_json_file) - else: - # User has not passed in parameters file, so we use the parameters required - # from config for the default bicep template produced from building the - # NFDV using this CLI - logger.debug("Create parameters for default NFDV template.") - parameters = self.construct_vnfd_parameters() - - logger.debug(parameters) - - # Create or check required resources - deploy_manifest_template = not self.nfd_predeploy(definition_type=VNF) - if deploy_manifest_template: - self.deploy_manifest_template( - manifest_parameters_json_file, manifest_bicep_path, VNF + else: + # User has not passed in parameters file, so we use the parameters + # required from config for the default bicep template produced from + # building the NFDV using this CLI + logger.debug("Create parameters for default NFDV template.") + parameters = self.construct_vnfd_parameters() + + logger.debug(parameters) + + # Create or check required resources + deploy_manifest_template = not self.nfd_predeploy(definition_type=VNF) + if deploy_manifest_template: + self.deploy_manifest_template( + manifest_parameters_json_file, manifest_bicep_path, VNF + ) + else: + print( + f"Artifact manifests exist for NFD 
{self.config.nf_name} " + f"version {self.config.version}" + ) + message = ( + f"Deploy bicep template for NFD {self.config.nf_name} " + f"version {self.config.version} " + f"into {self.config.publisher_resource_group_name} under publisher " + f"{self.config.publisher_name}" ) + print(message) + logger.info(message) + self.deploy_bicep_template(bicep_path, parameters) + print(f"Deployed NFD {self.config.nf_name} version {self.config.version}.") else: - print( - f"Artifact manifests exist for NFD {self.config.nf_name} " - f"version {self.config.version}" - ) - message = ( - f"Deploy bicep template for NFD {self.config.nf_name} version {self.config.version} " - f"into {self.config.publisher_resource_group_name} under publisher " - f"{self.config.publisher_name}" - ) - print(message) - logger.info(message) - self.deploy_bicep_template(bicep_path, parameters) - print(f"Deployed NFD {self.config.nf_name} version {self.config.version}.") + print("Skipping bicep publish") + + if skip == ARTIFACT_UPLOAD: + print("Skipping artifact upload") + print("Done") + return storage_account_manifest = ArtifactManifestOperator( self.config, @@ -262,6 +277,7 @@ def deploy_cnfd_from_bicep( parameters_json_file: Optional[str] = None, manifest_bicep_path: Optional[str] = None, manifest_parameters_json_file: Optional[str] = None, + skip: Optional[str] = None ) -> None: """ Deploy the bicep template defining the CNFD. @@ -275,50 +291,60 @@ def deploy_cnfd_from_bicep( :param manifest_bicep_path: The path to the bicep template of the manifest :param manifest_parameters_json_file: path to an override file of set parameters for the manifest + :param skip: options to skip, either publish bicep or upload artifacts """ assert isinstance(self.config, CNFConfiguration) - if not bicep_path: - # User has not passed in a bicep template, so we are deploying the default - # one produced from building the NFDV using this CLI - bicep_path = os.path.join( - self.config.build_output_folder_name, - CNF_DEFINITION_BICEP_TEMPLATE, - ) + if not skip == BICEP_PUBLISH: + if not bicep_path: + # User has not passed in a bicep template, so we are deploying the + # default one produced from building the NFDV using this CLI + bicep_path = os.path.join( + self.config.build_output_folder_name, + CNF_DEFINITION_BICEP_TEMPLATE, + ) - if parameters_json_file: - parameters = self.read_parameters_from_file(parameters_json_file) - else: - # User has not passed in parameters file, so we use the parameters required - # from config for the default bicep template produced from building the - # NFDV using this CLI - logger.debug("Create parameters for default NFDV template.") - parameters = self.construct_cnfd_parameters() + if parameters_json_file: + parameters = self.read_parameters_from_file(parameters_json_file) + else: + # User has not passed in parameters file, so we use the parameters + # required from config for the default bicep template produced from + # building the NFDV using this CLI + logger.debug("Create parameters for default NFDV template.") + parameters = self.construct_cnfd_parameters() - logger.debug( - f"Parameters used for CNF definition bicep deployment: {parameters}" - ) + logger.debug( + f"Parameters used for CNF definition bicep deployment: {parameters}" + ) - # Create or check required resources - deploy_manifest_template = not self.nfd_predeploy(definition_type=CNF) - if deploy_manifest_template: - self.deploy_manifest_template( - manifest_parameters_json_file, manifest_bicep_path, CNF + # Create or check required resources + 
deploy_manifest_template = not self.nfd_predeploy(definition_type=CNF) + if deploy_manifest_template: + self.deploy_manifest_template( + manifest_parameters_json_file, manifest_bicep_path, CNF + ) + else: + print( + f"Artifact manifests exist for NFD {self.config.nf_name} " + f"version {self.config.version}" + ) + message = ( + f"Deploy bicep template for NFD {self.config.nf_name} " + f"version {self.config.version} " + f"into {self.config.publisher_resource_group_name} under publisher " + f"{self.config.publisher_name}" ) + print(message) + logger.info(message) + self.deploy_bicep_template(bicep_path, parameters) + print(f"Deployed NFD {self.config.nf_name} version {self.config.version}.") else: - print( - f"Artifact manifests exist for NFD {self.config.nf_name} " - f"version {self.config.version}" - ) - message = ( - f"Deploy bicep template for NFD {self.config.nf_name} version {self.config.version} " - f"into {self.config.publisher_resource_group_name} under publisher " - f"{self.config.publisher_name}" - ) - print(message) - logger.info(message) - self.deploy_bicep_template(bicep_path, parameters) - print(f"Deployed NFD {self.config.nf_name} version {self.config.version}.") + print("Skipping bicep publish") + + if skip == ARTIFACT_UPLOAD: + print("Skipping artifact upload") + print("Done") + return acr_properties = self.api_clients.aosm_client.artifact_stores.get( resource_group_name=self.config.publisher_resource_group_name, @@ -329,6 +355,16 @@ def deploy_cnfd_from_bicep( target_registry_resource_group_name = acr_properties.storage_resource_id.split( "/" )[-5] + # Check whether the source registry has a namespace in the repository path + source_registry_match = re.search( + SOURCE_ACR_REGEX, + self.config.source_registry_id + ) + # Config validation has already checked and raised an error if the regex doesn't + # match + source_registry_namespace: str = "" + if self.config.source_registry_namespace: + source_registry_namespace = f"{self.config.source_registry_namespace}/" acr_manifest = ArtifactManifestOperator( self.config, @@ -360,7 +396,8 @@ def deploy_cnfd_from_bicep( artifact_dictionary.pop(helm_package_name) - # All the remaining artifacts are not in the helm_packages list. We assume that they are images that need to be copied from another ACR. + # All the remaining artifacts are not in the helm_packages list. We assume that + # they are images that need to be copied from another ACR. for artifact in artifact_dictionary.values(): assert isinstance(artifact, Artifact) @@ -369,9 +406,13 @@ def deploy_cnfd_from_bicep( cli_ctx=cli_ctx, container_registry_client=self.api_clients.container_registry_client, source_registry_id=self.config.source_registry_id, - source_image=f"{artifact.artifact_name}:{artifact.artifact_version}", + source_image=( + f"{source_registry_namespace}{artifact.artifact_name}" + f":{artifact.artifact_version}" + ), target_registry_resource_group_name=target_registry_resource_group_name, target_registry_name=target_registry_name, + target_tags=[f"{artifact.artifact_name}:{artifact.artifact_version}"], ) print("Done") @@ -382,6 +423,7 @@ def deploy_nsd_from_bicep( parameters_json_file: Optional[str] = None, manifest_bicep_path: Optional[str] = None, manifest_parameters_json_file: Optional[str] = None, + skip: Optional[str] = None, ) -> None: """ Deploy the bicep template defining the VNFD. 
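The CNF publish path above relies on SOURCE_ACR_REGEX (added to util/constants.py at the end of this series) both to validate source_registry_id and to extract the source resource group and registry name from it. A stand-alone sketch of that parsing; the regex here is an assumed pattern of the documented shape, not copied from the repo:

    import re

    # Assumed pattern capturing the resource group and registry name from an ACR
    # resource ID of the form:
    # /subscriptions/<sub>/resourceGroups/<rg>/providers/Microsoft.ContainerRegistry/registries/<name>
    SOURCE_ACR_REGEX = (
        r".*\/resourceGroups\/([^\/]*)\/providers\/"
        r"Microsoft\.ContainerRegistry\/registries\/([^\/]*)"
    )

    registry_id = (
        "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/my-rg"
        "/providers/Microsoft.ContainerRegistry/registries/sourceacr"
    )

    match = re.search(SOURCE_ACR_REGEX, registry_id)
    if match and len(match.groups()) == 2:
        resource_group_name, registry_name = match.group(1), match.group(2)
        print(resource_group_name, registry_name)  # my-rg sourceacr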
@@ -393,49 +435,55 @@ def deploy_nsd_from_bicep( :parameters_json_file: path to an override file of set parameters for the nfdv :param manifest_bicep_path: The path to the bicep template of the manifest :param manifest_parameters_json_file: path to an override file of set parameters for the manifest + :param skip: options to skip, either publish bicep or upload artifacts """ assert isinstance(self.config, NSConfiguration) + if not skip == BICEP_PUBLISH: + if not bicep_path: + # User has not passed in a bicep template, so we are deploying the default + # one produced from building the NSDV using this CLI + bicep_path = os.path.join( + self.config.build_output_folder_name, + NSD_DEFINITION_BICEP_FILE, + ) - if not bicep_path: - # User has not passed in a bicep template, so we are deploying the default - # one produced from building the NSDV using this CLI - bicep_path = os.path.join( - self.config.build_output_folder_name, - NSD_DEFINITION_BICEP_FILE, - ) + if parameters_json_file: + parameters = self.read_parameters_from_file(parameters_json_file) + else: + # User has not passed in parameters file, so we use the parameters required + # from config for the default bicep template produced from building the + # NSDV using this CLI + logger.debug("Create parameters for default NSDV template.") + parameters = self.construct_nsd_parameters() - if parameters_json_file: - parameters = self.read_parameters_from_file(parameters_json_file) - else: - # User has not passed in parameters file, so we use the parameters required - # from config for the default bicep template produced from building the - # NSDV using this CLI - logger.debug("Create parameters for default NSDV template.") - parameters = self.construct_nsd_parameters() + logger.debug(parameters) - logger.debug(parameters) + # Create or check required resources + deploy_manifest_template = not self.nsd_predeploy() - # Create or check required resources - deploy_manifest_template = not self.nsd_predeploy() + if deploy_manifest_template: + self.deploy_manifest_template( + manifest_parameters_json_file, manifest_bicep_path, NSD + ) + else: + print(f"Artifact manifests {self.config.acr_manifest_name} already exists") - if deploy_manifest_template: - self.deploy_manifest_template( - manifest_parameters_json_file, manifest_bicep_path, NSD + message = ( + f"Deploy bicep template for NSDV {self.config.nsd_version} " + f"into {self.config.publisher_resource_group_name} under publisher " + f"{self.config.publisher_name}" ) - else: - print(f"Artifact manifests {self.config.acr_manifest_name} already exists") + print(message) + logger.info(message) + self.deploy_bicep_template(bicep_path, parameters) + print( + f"Deployed NSD {self.config.nsdg_name} version {self.config.nsd_version}." + ) + if skip == ARTIFACT_UPLOAD: + print("Skipping artifact upload") + print("Done") + return - message = ( - f"Deploy bicep template for NSDV {self.config.nsd_version} " - f"into {self.config.publisher_resource_group_name} under publisher " - f"{self.config.publisher_name}" - ) - print(message) - logger.info(message) - self.deploy_bicep_template(bicep_path, parameters) - print( - f"Deployed NSD {self.config.acr_manifest_name} version {self.config.nsd_version}." 
- ) acr_manifest = ArtifactManifestOperator( self.config, self.api_clients, diff --git a/src/aosm/azext_aosm/deploy/pre_deploy.py b/src/aosm/azext_aosm/deploy/pre_deploy.py index b83ed1b3b52..5446dd5baf0 100644 --- a/src/aosm/azext_aosm/deploy/pre_deploy.py +++ b/src/aosm/azext_aosm/deploy/pre_deploy.py @@ -4,12 +4,19 @@ # -------------------------------------------------------------------------------------- """Contains class for deploying resources required by NFDs/NSDs via the SDK.""" +import re from azure.cli.core.azclierror import AzCLIError from azure.core import exceptions as azure_exceptions from azure.mgmt.resource.resources.models import ResourceGroup from knack.log import get_logger -from azext_aosm._configuration import NFConfiguration, NSConfiguration, VNFConfiguration +from azext_aosm._configuration import ( + NFConfiguration, + NSConfiguration, + VNFConfiguration, + CNFConfiguration +) +from azext_aosm.util.constants import SOURCE_ACR_REGEX from azext_aosm.util.management_clients import ApiClients from azext_aosm.vendored_sdks.models import ( ArtifactStore, @@ -136,22 +143,25 @@ def ensure_config_source_registry_exists(self) -> None: Finds the parameters from self.config """ + assert isinstance(self.config, CNFConfiguration) logger.info( "Check if the source registry %s exists", self.config.source_registry_id, ) - # Assume that the registry id is of the form: /subscriptions//resourceGroups//providers/Microsoft.ContainerRegistry/registries/ - source_registry_name = self.config.source_registry_id.split("/")[-1] - source_registry_resource_group_name = self.config.source_registry_id.split("/")[ - -5 - ] - - # This will raise an error if the registry does not exist - self.api_clients.container_registry_client.get( - resource_group_name=source_registry_resource_group_name, - registry_name=source_registry_name, - ) + # Match the source registry format + source_registry_match = re.search(SOURCE_ACR_REGEX, self.config.source_registry_id) + # Config validation has already checked and raised an error if the regex doesn't + # match + if source_registry_match and len(source_registry_match.groups()) > 1: + source_registry_resource_group_name = source_registry_match.group(1) + source_registry_name = source_registry_match.group(2) + + # This will raise an error if the registry does not exist + self.api_clients.container_registry_client.get( + resource_group_name=source_registry_resource_group_name, + registry_name=source_registry_name, + ) def ensure_artifact_store_exists( self, diff --git a/src/aosm/azext_aosm/generate_nfd/cnf_nfd_generator.py b/src/aosm/azext_aosm/generate_nfd/cnf_nfd_generator.py index 5af6014d87c..506a70e04b0 100644 --- a/src/aosm/azext_aosm/generate_nfd/cnf_nfd_generator.py +++ b/src/aosm/azext_aosm/generate_nfd/cnf_nfd_generator.py @@ -199,7 +199,7 @@ def _generate_chart_value_mappings(self, helm_package: HelmPackageConfig) -> Non top_level_values_yaml = self._read_top_level_values_yaml(helm_package) mapping_to_write = self._replace_values_with_deploy_params( - top_level_values_yaml, {} + top_level_values_yaml, None ) # Write the mapping to a file @@ -401,21 +401,36 @@ def find_pattern_matches_in_chart( for file in self._find_yaml_files(chart_dir): with open(file, "r", encoding="UTF-8") as f: + logger.debug("Searching for %s in %s", start_string, file) for line in f: if start_string in line: + logger.debug("Found %s in %s", start_string, line) path = re.findall(IMAGE_PATH_REGEX, line) # If "image:", search for chart name and version if start_string == 
IMAGE_START_STRING: name_and_version = re.search( IMAGE_NAME_AND_VERSION_REGEX, line ) - matches.append( - ( - path, + logger.debug( + "Regex match for name and version is %s", + name_and_version + ) + + if name_and_version and len(name_and_version.groups()) == 2: + logger.debug( + "Found image name and version %s %s", name_and_version.group(1), - name_and_version.group(2), + name_and_version.group(2) ) - ) + matches.append( + ( + path, + name_and_version.group(1), + name_and_version.group(2), + ) + ) + else: + logger.debug("No image name and version found") else: matches += path return matches @@ -486,17 +501,23 @@ def get_chart_mapping_schema( deploy_params_dict = self.traverse_dict( values_data, DEPLOYMENT_PARAMETER_MAPPING_REGEX ) + logger.debug("Deploy params dict is %s", deploy_params_dict) new_schema = self.search_schema(deploy_params_dict, schema_data) except KeyError as e: raise InvalidTemplateError( - f"ERROR: There is a problem with your schema or values for the helm package '{helm_package.name}'. \ + "ERROR: There is a problem with your schema or " + f"values for the helm package '{helm_package.name}'. \ Please fix this and run the command again." ) from e logger.debug("Generated chart mapping schema for %s", helm_package.name) return new_schema - def traverse_dict(self, d, target): + def traverse_dict( + self, + dict_to_search: Dict[Any, Any], + target_regex: str + ) -> Dict[str, List[str]]: """ Traverse the dictionary that is loaded from the file provided by path_to_mappings in the input.json. @@ -507,7 +528,8 @@ def traverse_dict(self, d, target): :param d: The dictionary to traverse. :param target: The regex to search for. """ - stack = [(d, [])] # Initialize the stack with the dictionary and an empty path + # Initialize the stack with the dictionary and an empty path + stack = [(dict_to_search, [])] result = {} # Initialize empty dictionary to store the results while stack: # While there are still items in the stack # Pop the last item from the stack and unpack it into node (the dictionary) and path @@ -522,24 +544,42 @@ def traverse_dict(self, d, target): ) # If the value is a string + matches target regex elif isinstance(v, str) and re.search( - target, v + target_regex, v ): # Take the match i.e, foo from {deployParameter.foo} match = re.search( - target, v + target_regex, v ) # Add it to the result dictionary with its path as the value result[match.group(1)] = path + [ k ] elif isinstance(v, list): + logger.debug("Found a list %s", v) for i in v: - if isinstance(i, str) and re.search(target, i): - match = re.search(target, i) + logger.debug("Found an item %s", i) + if isinstance(i, str) and re.search(target_regex, i): + match = re.search(target_regex, i) result[match.group(1)] = path + [k] + elif isinstance(i, dict): + stack.append((i, path + [k])) + elif isinstance(i, list): + # We should fix this but for now just log a warning and + # carry on + logger.warning( + "Values mapping file contains a list of lists " + "at path %s, which this tool cannot parse. " + "Please check the output configMappings and schemas " + "files and check that they are as required.", + path + [k] + ) return result - def search_schema(self, result, full_schema): + def search_schema( + self, + deployParams_paths: Dict[str, List[str]], + full_schema + ) -> Dict[str, Dict[str, str]]: """ Search through provided schema for the types of the deployment parameters. This assumes that the type of the key will be the type of the deployment parameter. 
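The traverse_dict changes above walk the values-mapping dictionary iteratively, recording the path of keys that leads to each {deployParameters.<name>} placeholder, and now also descend into dicts nested inside lists. A condensed stand-alone sketch of that traversal; it uses the DEPLOYMENT_PARAMETER_MAPPING_REGEX constant shown later in the series and invented sample data:

    import re
    from typing import Any, Dict, List

    DEPLOYMENT_PARAMETER_MAPPING_REGEX = r"\{deployParameters.(.+?)\}"

    def find_deploy_params(dict_to_search: Dict[Any, Any]) -> Dict[str, List[str]]:
        # Iterative depth-first walk: the stack holds (node, path-of-keys-so-far).
        stack = [(dict_to_search, [])]
        result: Dict[str, List[str]] = {}
        while stack:
            node, path = stack.pop()
            for key, value in node.items():
                if isinstance(value, dict):
                    stack.append((value, path + [key]))
                elif isinstance(value, str):
                    match = re.search(DEPLOYMENT_PARAMETER_MAPPING_REGEX, value)
                    if match:
                        result[match.group(1)] = path + [key]
                elif isinstance(value, list):
                    for item in value:
                        if isinstance(item, dict):
                            stack.append((item, path + [key]))
                        elif isinstance(item, str):
                            match = re.search(DEPLOYMENT_PARAMETER_MAPPING_REGEX, item)
                            if match:
                                result[match.group(1)] = path + [key]
        return result

    mappings = {"global": {"image": {"tag": "{deployParameters.imageTag}"}}}
    print(find_deploy_params(mappings))  # {'imageTag': ['global', 'image']}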
@@ -548,24 +588,41 @@ def search_schema(self, result, full_schema): Returns a dictionary of the deployment parameters in the format: {"foo": {"type": "string"}, "bar": {"type": "string"}} - param result: The result of the traverse_dict function. + param deployParams_paths: a dictionary of all the deploy parameters to search for, + with the key being the deploy parameter and the value being the + path to the value. + e.g. {"foo": ["global", "foo", "bar"]} param full_schema: The schema to search through. """ new_schema = {} no_schema_list = [] - for deploy_param in result: + for deploy_param, path_list in deployParams_paths.items(): + logger.debug( + "Searching for %s in schema at path %s", deploy_param, path_list + ) node = full_schema - for path_list in result[deploy_param]: + for path in path_list: if "properties" in node.keys(): - node = node["properties"][path_list] + logger.debug( + "Searching properties for %s in schema at path %s", + deploy_param, path + ) + node = node["properties"][path] else: + logger.debug("No schema node found for %s", deploy_param) no_schema_list.append(deploy_param) new_schema.update({deploy_param: {"type": "string"}}) if deploy_param not in new_schema: new_schema.update({deploy_param: {"type": node.get("type", None)}}) if no_schema_list: - print("No schema found for deployment parameter(s):", no_schema_list) - print("We default these parameters to type string") + logger.warning( + "No schema found for deployment parameter(s): %s", no_schema_list + ) + logger.warning( + "We default these parameters to type string. " + "Please edit schemas/%s in the output before publishing " + "if this is wrong", DEPLOYMENT_PARAMETERS + ) return new_schema def _replace_values_with_deploy_params( @@ -613,7 +670,7 @@ def _replace_values_with_deploy_params( final_values_mapping_dict[k].append( self._replace_values_with_deploy_params(item, param_name) ) - elif isinstance(v, (str, int, bool)): + elif isinstance(v, (str, int, bool)) or not v: replacement_value = f"{{deployParameters.{param_name}}}" final_values_mapping_dict[k].append(replacement_value) else: @@ -621,31 +678,9 @@ def _replace_values_with_deploy_params( f"Found an unexpected type {type(v)} of key {k} in " "values.yaml, cannot generate values mapping file." ) - else: - raise ValueError( - f"Found an unexpected type {type(v)} of key {k} in values.yaml, " - "cannot generate values mapping file." - ) - - return final_values_mapping_dict - - def _replace_values_with_deploy_params( - self, - values_yaml_dict, - param_prefix: Optional[str] = None, - ) -> Dict[Any, Any]: - """ - Given the yaml dictionary read from values.yaml, replace all the values with {deploymentParameter.keyname}. - - Thus creating a values mapping file if the user has not provided one in config. - """ - logger.debug("Replacing values with deploy parameters") - final_values_mapping_dict: Dict[Any, Any] = {} - for k, v in values_yaml_dict.items(): - # if value is a string and contains deployParameters. - logger.debug("Processing key %s", k) - param_name = k if param_prefix is None else f"{param_prefix}_{k}" - if isinstance(v, (str, int, bool)): + elif not v: + # V is blank so we don't know what type it is. Assuming it is an + # empty string (but do this after checking for dict and list) # Replace the parameter with {deploymentParameter.keyname} if self.interactive: # Interactive mode. 
Prompt user to include or exclude parameters @@ -658,30 +693,6 @@ def _replace_values_with_deploy_params( # add the schema for k (from the big schema) to the (smaller) schema final_values_mapping_dict.update({k: replacement_value}) - elif isinstance(v, dict): - final_values_mapping_dict[k] = self._replace_values_with_deploy_params( - v, param_name - ) - elif isinstance(v, list): - final_values_mapping_dict[k] = [] - for index, item in enumerate(v): - param_name = ( - f"{param_prefix}_{k}_{index}" - if param_prefix - else f"{k})_{index}" - ) - if isinstance(item, dict): - final_values_mapping_dict[k].append( - self._replace_values_with_deploy_params(item, param_name) - ) - elif isinstance(v, (str, int, bool)): - replacement_value = f"{{deployParameters.{param_name}}}" - final_values_mapping_dict[k].append(replacement_value) - else: - raise ValueError( - f"Found an unexpected type {type(v)} of key {k} in " - "values.yaml, cannot generate values mapping file." - ) else: raise ValueError( f"Found an unexpected type {type(v)} of key {k} in values.yaml, " @@ -709,8 +720,9 @@ def get_chart_name_and_version( chart_version = data["version"] else: raise FileOperationError( - f"A name or version is missing from Chart.yaml in the helm package '{helm_package.name}'. \ - Please fix this and run the command again." + "A name or version is missing from Chart.yaml in the " + f"helm package '{helm_package.name}'. " + "Please fix this and run the command again." ) return (chart_name, chart_version) diff --git a/src/aosm/azext_aosm/generate_nsd/nsd_generator.py b/src/aosm/azext_aosm/generate_nsd/nsd_generator.py index 3f365d4cbb6..63f723d7254 100644 --- a/src/aosm/azext_aosm/generate_nsd/nsd_generator.py +++ b/src/aosm/azext_aosm/generate_nsd/nsd_generator.py @@ -19,14 +19,15 @@ from azext_aosm.util.constants import ( CONFIG_MAPPINGS, NF_DEFINITION_BICEP_FILE, - NF_TEMPLATE_BICEP_FILE, + NF_TEMPLATE_JINJA2_SOURCE_TEMPLATE, NSD_ARTIFACT_MANIFEST_BICEP_FILE, NSD_ARTIFACT_MANIFEST_SOURCE_TEMPLATE, NSD_CONFIG_MAPPING_FILE, NSD_DEFINITION_BICEP_FILE, - NSD_DEFINITION_BICEP_SOURCE_TEMPLATE, + NSD_DEFINITION_JINJA2_SOURCE_TEMPLATE, SCHEMAS, TEMPLATES, + CNF, VNF, ) from azext_aosm.util.management_clients import ApiClients @@ -59,8 +60,8 @@ class NSDGenerator: def __init__(self, api_clients: ApiClients, config: NSConfiguration): self.config = config - self.nsd_bicep_template_name = NSD_DEFINITION_BICEP_SOURCE_TEMPLATE - self.nf_bicep_template_name = NF_TEMPLATE_BICEP_FILE + self.nsd_bicep_template_name = NSD_DEFINITION_JINJA2_SOURCE_TEMPLATE + self.nf_bicep_template_name = NF_TEMPLATE_JINJA2_SOURCE_TEMPLATE self.nsd_bicep_output_name = NSD_DEFINITION_BICEP_FILE self.nfdv_parameter_name = \ f"{self.config.network_function_definition_group_name.replace('-', '_')}_nfd_version" @@ -129,23 +130,36 @@ def config_group_schema_dict(self) -> Dict[str, Any]: # Add in the NFDV version as a parameter. description_string = ( f"The version of the {self.config.network_function_definition_group_name} " - f"NFD to use. This version must be compatable with (have the same " + f"NFD to use. This version must be compatible with (have the same " f"parameters exposed as) " f"{self.config.network_function_definition_version_name}." ) cgs_dict["properties"][self.nfdv_parameter_name] = \ {"type": "string", "description": description_string} + cgs_dict["required"].append(self.nfdv_parameter_name) managed_identity_description_string = ( "The managed identity to use to deploy NFs within this SNS. 
This should " - "of the form '/subscriptions/{subscriptionId}/resourceGroups/" + "be of the form '/subscriptions/{subscriptionId}/resourceGroups/" "{resourceGroupName}/providers/Microsoft.ManagedIdentity/" "userAssignedIdentities/{identityName}. " - "The az aosm tool only supports user assigned identities at present, " - "you cannot use a System Assigned identity." + "If you wish to use a system assigned identity, set this to a blank string." ) cgs_dict["properties"]["managedIdentity"] = \ {"type": "string", "description": managed_identity_description_string} + cgs_dict["required"].append("managedIdentity") + + if self.config.network_function_type == CNF: + custom_location_description_string = ( + "The custom location ID of the ARC-Enabled AKS Cluster to deploy the CNF " + "to. Should be of the form " + "'/subscriptions/c7bd9d96-70dd-4f61-af56-6e0abd8d80b5/resourcegroups" + "/{resourceGroupName}/providers/microsoft.extendedlocation/" + "customlocations/{customLocationName}'" + ) + cgs_dict["properties"]["customLocationId"] = \ + {"type": "string", "description": custom_location_description_string} + cgs_dict["required"].append("customLocationId") return cgs_dict @@ -201,17 +215,21 @@ def write_nf_bicep(self) -> None: bicep_deploymentValues = "" - - if "properties" not in self.deploy_parameters: + if not self.deploy_parameters or not self.deploy_parameters.get("properties"): raise ValueError( f"NFDV in {self.config.network_function_definition_group_name} has " "no properties within deployParameters" ) deploy_properties = self.deploy_parameters["properties"] + logger.debug("Deploy properties: %s", deploy_properties) for key, value in deploy_properties.items(): # location is sometimes part of deploy_properties. # We want to avoid having duplicate params in the bicep template + logger.debug( + "Adding deploy parameter key: %s, value: %s to nf template", + key, + value) if key != "location": bicep_type = ( NFV_TO_BICEP_PARAM_TYPES.get(value["type"]) or value["type"] @@ -234,9 +252,10 @@ def write_nf_bicep(self) -> None: # Ideally we would use the network_function_type from reading the actual # NF, as we do for deployParameters, but the SDK currently doesn't # support this and needs to be rebuilt to do so. - "nfvi_type": NFVIType.AZURE_CORE + "nfvi_type": NFVIType.AZURE_CORE.value if self.config.network_function_type == VNF else NFVIType.AZURE_ARC_KUBERNETES.value, + "CNF": True if self.config.network_function_type == CNF else False, }, ) @@ -263,7 +282,10 @@ def write_nsd_manifest(self) -> None: NSD_ARTIFACT_MANIFEST_SOURCE_TEMPLATE, NSD_ARTIFACT_MANIFEST_BICEP_FILE, {} ) - def generate_bicep(self, template_name, output_file_name, params) -> None: + def generate_bicep(self, + template_name: str, + output_file_name: str, + params: Dict[Any,Any]) -> None: """ Render the bicep templates with the correct parameters and copy them into the build output folder. 
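generate_bicep above renders the renamed .bicep.j2 sources with Jinja2 and writes the result into the build output folder. A minimal stand-alone sketch of that render-and-write step; the paths and parameter values shown in the commented example are placeholders, and the real implementation may differ in detail:

    from pathlib import Path

    from jinja2 import Environment, FileSystemLoader, StrictUndefined

    def render_bicep_template(template_dir: Path, template_name: str,
                              output_path: Path, params: dict) -> None:
        # Load the .bicep.j2 source and substitute the template parameters.
        env = Environment(loader=FileSystemLoader(str(template_dir)),
                          undefined=StrictUndefined)
        template = env.get_template(template_name)
        output_path.write_text(template.render(**params), encoding="utf-8")

    # Example (hypothetical values):
    # render_bicep_template(Path("templates"), "nf_template.bicep.j2",
    #                       Path("nsd-bicep-templates/nf_definition.bicep"),
    #                       {"location": "uksouth", "CNF": True})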
diff --git a/src/aosm/azext_aosm/generate_nsd/templates/nf_template.bicep b/src/aosm/azext_aosm/generate_nsd/templates/nf_template.bicep.j2 similarity index 79% rename from src/aosm/azext_aosm/generate_nsd/templates/nf_template.bicep rename to src/aosm/azext_aosm/generate_nsd/templates/nf_template.bicep.j2 index 89a2362e8ec..8cf4a207a23 100644 --- a/src/aosm/azext_aosm/generate_nsd/templates/nf_template.bicep +++ b/src/aosm/azext_aosm/generate_nsd/templates/nf_template.bicep.j2 @@ -18,6 +18,11 @@ param networkFunctionDefinitionOfferingLocation string = '{{network_function_def @description('The managed identity that should be used to create the NF.') param managedIdentity string +{%- if CNF %} +@description('The custom location of the ARC-enabled AKS cluster to create the NF.') +param customLocationId string +{%- endif %} + param location string = '{{location}}' param nfviType string = '{{nfvi_type}}' @@ -30,15 +35,19 @@ var deploymentValues = { {{deploymentValues}} } +var identityObject = (managedIdentity == '') ? { + type: 'SystemAssigned' +} : { + type: 'UserAssigned' + userAssignedIdentities: { + '${managedIdentity}': {} + } +} + resource nf_resource 'Microsoft.HybridNetwork/networkFunctions@2023-04-01-preview' = { name: '{{network_function_name}}' location: location - identity: { - type: 'UserAssigned' - userAssignedIdentities: { - '${managedIdentity}': {} - } - } + identity: identityObject properties: { publisherName: publisherName publisherScope: 'Private' @@ -46,7 +55,11 @@ resource nf_resource 'Microsoft.HybridNetwork/networkFunctions@2023-04-01-previe networkFunctionDefinitionVersion: {{network_function_definition_version_parameter}} networkFunctionDefinitionOfferingLocation: networkFunctionDefinitionOfferingLocation nfviType: nfviType +{%- if CNF %} + nfviId: customLocationId +{%- else %} nfviId: resourceGroupId +{%- endif %} allowSoftwareUpdate: true deploymentValues: string(deploymentValues) } diff --git a/src/aosm/azext_aosm/generate_nsd/templates/nsd_template.bicep b/src/aosm/azext_aosm/generate_nsd/templates/nsd_template.bicep.j2 similarity index 92% rename from src/aosm/azext_aosm/generate_nsd/templates/nsd_template.bicep rename to src/aosm/azext_aosm/generate_nsd/templates/nsd_template.bicep.j2 index 0f777e01780..3570adf0247 100644 --- a/src/aosm/azext_aosm/generate_nsd/templates/nsd_template.bicep +++ b/src/aosm/azext_aosm/generate_nsd/templates/nsd_template.bicep.j2 @@ -98,12 +98,11 @@ resource nsdVersion 'Microsoft.Hybridnetwork/publishers/networkservicedesigngrou templateType: 'ArmTemplate' // The parameter values map values from the CG schema, to values required by the template // deployed by this resource element. - // outputParameters from the disk RET are used in these parameterValues // This NSD does not support the NF-Agent as it has no Configuration Resource Elements. - // If Configuration resource elements (SDFs, Perimeta config) are added, the simplNfConfigMapping + // If Configuration resource elements (SDFs, Perimeta config) are added, the configMapping // must be edited to have these lines (instead of blank values. 
SNSSelf is null if there are no Configuration elements) - // "nfAgentServiceBusNamespace": "{configurationparameters('SNSSelf').nfAgentConfiguration.resourceNamespace}", - // "nfAgentUserAssignedIdentityResourceId": "{configurationparameters('SNSSelf').nfAgentConfiguration.userAssignedIdentityResourceId}", + // "": "{configurationparameters('SNSSelf').nfAgentConfiguration.resourceNamespace}", + // "": "{configurationparameters('SNSSelf').nfAgentConfiguration.userAssignedIdentityResourceId}", parameterValues: string(loadJsonContent('configMappings/configMappings.json')) } dependsOnProfile: { diff --git a/src/aosm/azext_aosm/util/constants.py b/src/aosm/azext_aosm/util/constants.py index 7b3087f870e..9e9d7492c2e 100644 --- a/src/aosm/azext_aosm/util/constants.py +++ b/src/aosm/azext_aosm/util/constants.py @@ -10,10 +10,14 @@ NSD = "nsd" SCHEMA = "schema" +# Skip steps +BICEP_PUBLISH = "bicep-publish" +ARTIFACT_UPLOAD = "artifact-upload" + # Names of files used in the repo -NSD_DEFINITION_BICEP_SOURCE_TEMPLATE = "nsd_template.bicep" +NSD_DEFINITION_JINJA2_SOURCE_TEMPLATE = "nsd_template.bicep.j2" NSD_DEFINITION_BICEP_FILE = "nsd_definition.bicep" -NF_TEMPLATE_BICEP_FILE = "nf_template.bicep" +NF_TEMPLATE_JINJA2_SOURCE_TEMPLATE = "nf_template.bicep.j2" NF_DEFINITION_BICEP_FILE = "nf_definition.bicep" NF_DEFINITION_JSON_FILE = "nf_definition.json" NSD_DEFINITION_OUTPUT_BICEP_PREFIX = "nsd-bicep-templates" @@ -72,3 +76,12 @@ IMAGE_NAME_AND_VERSION_REGEX = r"\/([^\s]*):([^\s)\"}]*)" DEPLOYMENT_PARAMETER_MAPPING_REGEX = r"\{deployParameters.(.+?)\}" + +# Assume that the registry id is of the form: +# /subscriptions//resourceGroups//providers/ +# Microsoft.ContainerRegistry/registries/ +# This returns groups for the resource group name and registry name +SOURCE_ACR_REGEX = ( + r".*\/resourceGroups\/([^\/]*)\/providers\/Microsoft." 
+ r"ContainerRegistry\/registries\/([^\/]*)" + ) From f4bd00c44911c2d65ae64d63a2632c3fc173aaf1 Mon Sep 17 00:00:00 2001 From: sunnycarter <36891339+sunnycarter@users.noreply.github.com> Date: Fri, 30 Jun 2023 17:51:18 +0100 Subject: [PATCH 4/7] merge add-aosm-ext in (#37) --- .github/workflows/CheckStyleAndLinting.yml | 2 +- src/aosm/azext_aosm/_client_factory.py | 2 +- src/aosm/azext_aosm/_configuration.py | 128 +++++++++++------- src/aosm/azext_aosm/_params.py | 46 +++++-- src/aosm/azext_aosm/commands.py | 1 - src/aosm/azext_aosm/custom.py | 28 ++-- src/aosm/azext_aosm/delete/delete.py | 46 +++++-- src/aosm/azext_aosm/deploy/artifact.py | 31 +++-- .../azext_aosm/deploy/artifact_manifest.py | 7 +- src/aosm/azext_aosm/deploy/deploy_with_arm.py | 108 +++++++-------- src/aosm/azext_aosm/deploy/pre_deploy.py | 70 ++++++---- .../generate_nfd/cnf_nfd_generator.py | 116 ++++++++-------- .../generate_nfd/nfd_generator_base.py | 14 +- .../generate_nfd/vnf_nfd_generator.py | 37 +++-- .../azext_aosm/generate_nsd/nsd_generator.py | 90 +++++++----- src/aosm/azext_aosm/util/constants.py | 58 ++++---- .../azext_aosm/util/management_clients.py | 21 ++- src/aosm/setup.md | 2 + src/aosm/setup.py | 3 +- 19 files changed, 447 insertions(+), 363 deletions(-) diff --git a/.github/workflows/CheckStyleAndLinting.yml b/.github/workflows/CheckStyleAndLinting.yml index 0c8382649d8..82e39b86e0b 100644 --- a/.github/workflows/CheckStyleAndLinting.yml +++ b/.github/workflows/CheckStyleAndLinting.yml @@ -15,4 +15,4 @@ jobs: - name: Check Style run: azdev style aosm - name: Check Linting - run: azdev linter aosm + run: azdev linter aosm \ No newline at end of file diff --git a/src/aosm/azext_aosm/_client_factory.py b/src/aosm/azext_aosm/_client_factory.py index 66c3bea2ab8..61fe56814a4 100644 --- a/src/aosm/azext_aosm/_client_factory.py +++ b/src/aosm/azext_aosm/_client_factory.py @@ -5,9 +5,9 @@ from azure.cli.core.commands.client_factory import get_mgmt_service_client from azure.cli.core.profiles import ResourceType +from azure.mgmt.containerregistry import ContainerRegistryManagementClient from .vendored_sdks import HybridNetworkManagementClient -from azure.mgmt.containerregistry import ContainerRegistryManagementClient def cf_aosm(cli_ctx, *_) -> HybridNetworkManagementClient: diff --git a/src/aosm/azext_aosm/_configuration.py b/src/aosm/azext_aosm/_configuration.py index 1c41cf562d0..f933e3bd524 100644 --- a/src/aosm/azext_aosm/_configuration.py +++ b/src/aosm/azext_aosm/_configuration.py @@ -1,6 +1,3 @@ -## Disabling as every if statement in validate in NSConfig class has this condition -# pylint: disable=simplifiable-condition - import os import re from dataclasses import dataclass, field @@ -11,77 +8,102 @@ from azext_aosm.util.constants import ( CNF, - DEFINITION_OUTPUT_BICEP_PREFIX, - NF_DEFINITION_JSON_FILE, + NF_DEFINITION_OUTPUT_BICEP_PREFIX, + NF_DEFINITION_JSON_FILENAME, NSD, - NSD_DEFINITION_OUTPUT_BICEP_PREFIX, + NSD_OUTPUT_BICEP_PREFIX, VNF, SOURCE_ACR_REGEX ) DESCRIPTION_MAP: Dict[str, str] = { - "publisher_resource_group_name": + "publisher_resource_group_name": ( "Resource group for the Publisher resource. " - "Will be created if it does not exist.", - "publisher_name": + "Will be created if it does not exist." + ), + "publisher_name": ( "Name of the Publisher resource you want your definition published to. " - "Will be created if it does not exist.", - "publisher_name_nsd": + "Will be created if it does not exist." 
+ ), + "publisher_name_nsd": ( "Name of the Publisher resource you want your design published to. " "This should be the same as the publisher used for your NFDVs" - , + ), "publisher_resource_group_name_nsd": "Resource group for the Publisher resource.", "nf_name": "Name of NF definition", "version": "Version of the NF definition", - "acr_artifact_store_name": "Name of the ACR Artifact Store resource. Will be created if it does not exist.", + "acr_artifact_store_name": ( + "Name of the ACR Artifact Store resource. Will be created if it does not exist." + ), "location": "Azure location to use when creating resources.", - "blob_artifact_store_name": + "blob_artifact_store_name": ( "Name of the storage account Artifact Store resource. Will be created if it " - "does not exist.", + "does not exist." + ), "artifact_name": "Name of the artifact", - "file_path": "Optional. File path of the artifact you wish to upload from your local disk. " - "Delete if not required.", - "blob_sas_url": "Optional. SAS URL of the blob artifact you wish to copy to your Artifact Store. " - "Delete if not required.", - "artifact_version": "Version of the artifact. For VHDs this must be in format A-B-C. " - "For ARM templates this must be in format A.B.C", + "file_path": ( + "Optional. File path of the artifact you wish to upload from your local disk. " + "Delete if not required." + ), + "blob_sas_url": ( + "Optional. SAS URL of the blob artifact you wish to copy to your Artifact" + " Store. Delete if not required." + ), + "artifact_version": ( + "Version of the artifact. For VHDs this must be in format A-B-C. " + "For ARM templates this must be in format A.B.C" + ), "nsdv_description": "Description of the NSDV", - "nsdg_name": "Network Service Design Group Name. This is the collection of Network Service Design Versions. " - "Will be created if it does not exist.", - "nsd_version": "Version of the NSD to be created. This should be in the format A.B.C", - "network_function_definition_group_name": + "nsdg_name": ( + "Network Service Design Group Name. This is the collection of Network Service" + " Design Versions. Will be created if it does not exist." + ), + "nsd_version": ( + "Version of the NSD to be created. This should be in the format A.B.C" + ), + "network_function_definition_group_name": ( "Existing Network Function Definition Group Name. " - "This can be created using the 'az aosm nfd' commands.", - "network_function_definition_version_name": + "This can be created using the 'az aosm nfd' commands." + ), + "network_function_definition_version_name": ( "Existing Network Function Definition Version Name. " - "This can be created using the 'az aosm nfd' commands.", - "network_function_definition_offering_location": "Offering location of the Network Function Definition", - "network_function_type": "Type of nf in the definition. Valid values are 'cnf' or 'vnf'", + "This can be created using the 'az aosm nfd' commands." + ), + "network_function_definition_offering_location": ( + "Offering location of the Network Function Definition" + ), + "network_function_type": ( + "Type of nf in the definition. Valid values are 'cnf' or 'vnf'" + ), "helm_package_name": "Name of the Helm package", - "path_to_chart": - "File path of Helm Chart on local disk. Accepts .tgz, .tar or .tar.gz", - "path_to_mappings": + "path_to_chart": ( + "File path of Helm Chart on local disk. 
Accepts .tgz, .tar or .tar.gz" + ), + "path_to_mappings": ( "File path of value mappings on local disk where chosen values are replaced " "with deploymentParameter placeholders. Accepts .yaml or .yml. If left as a " "blank string, a value mappings file will be generated with every value " "mapped to a deployment parameter. Use a blank string and --interactive on " "the build command to interactively choose which values to map." - , - "helm_depends_on": + ), + "helm_depends_on": ( "Names of the Helm packages this package depends on. " - "Leave as an empty array if no dependencies", - "image_name_parameter": + "Leave as an empty array if no dependencies" + ), + "image_name_parameter": ( "The parameter name in the VM ARM template which specifies the name of the " - "image to use for the VM.", - "source_registry_id": - "Resource ID of the source acr registry from which to pull " - "the image", - "source_registry_namespace": + "image to use for the VM." + ), + "source_registry_id": ( + "Resource ID of the source acr registry from which to pull the image" + ), + "source_registry_namespace": ( "Optional. Namespace of the repository of the source acr registry from which " "to pull. For example if your repository is samples/prod/nginx then set this to" " samples/prod . Leave blank if the image is in the root namespace." "See https://learn.microsoft.com/en-us/azure/container-registry/" - "container-registry-best-practices#repository-namespaces for further details.", + "container-registry-best-practices#repository-namespaces for further details." + ), } @@ -143,6 +165,9 @@ class NSConfiguration: def validate(self): """Validate that all of the configuration parameters are set.""" + # Exemption for pylint as explicitly including the empty string makes the code clearer + # pylint: disable=simplifiable-condition + if self.location == DESCRIPTION_MAP["location"] or "": raise ValueError("Location must be set") if self.publisher_name == DESCRIPTION_MAP["publisher_name_nsd"] or "": @@ -189,7 +214,7 @@ def validate(self): def build_output_folder_name(self) -> str: """Return the local folder for generating the bicep template to.""" current_working_directory = os.getcwd() - return f"{current_working_directory}/{NSD_DEFINITION_OUTPUT_BICEP_PREFIX}" + return f"{current_working_directory}/{NSD_OUTPUT_BICEP_PREFIX}" @property def resource_element_name(self) -> str: @@ -225,7 +250,7 @@ def arm_template(self) -> ArtifactConfig: artifact = ArtifactConfig() artifact.version = self.nsd_version artifact.file_path = os.path.join( - self.build_output_folder_name, NF_DEFINITION_JSON_FILE + self.build_output_folder_name, NF_DEFINITION_JSON_FILENAME ) return artifact @@ -268,11 +293,13 @@ def validate(self) -> None: if "." in self.vhd.version or "-" not in self.vhd.version: raise ValidationError( - "Config validation error. VHD artifact version should be in format A-B-C" + "Config validation error. VHD artifact version should be in format" + " A-B-C" ) if "." not in self.arm_template.version or "-" in self.arm_template.version: raise ValidationError( - "Config validation error. ARM template artifact version should be in format A.B.C" + "Config validation error. ARM template artifact version should be in" + " format A.B.C" ) filepath_set = ( self.vhd.file_path and self.vhd.file_path != DESCRIPTION_MAP["file_path"] @@ -284,7 +311,8 @@ def validate(self) -> None: # If these are the same, either neither is set or both are, both of which are errors if filepath_set == sas_set: raise ValidationError( - "Config validation error. 
VHD config must have either a local filepath or a blob SAS URL" + "Config validation error. VHD config must have either a local filepath" + " or a blob SAS URL" ) if filepath_set: @@ -304,7 +332,7 @@ def sa_manifest_name(self) -> str: def build_output_folder_name(self) -> str: """Return the local folder for generating the bicep template to.""" arm_template_path = self.arm_template.file_path - return f"{DEFINITION_OUTPUT_BICEP_PREFIX}{Path(str(arm_template_path)).stem}" + return f"{NF_DEFINITION_OUTPUT_BICEP_PREFIX}{Path(str(arm_template_path)).stem}" @dataclass @@ -336,7 +364,7 @@ def __post_init__(self): @property def build_output_folder_name(self) -> str: """Return the local folder for generating the bicep template to.""" - return f"{DEFINITION_OUTPUT_BICEP_PREFIX}{self.nf_name}" + return f"{NF_DEFINITION_OUTPUT_BICEP_PREFIX}{self.nf_name}" def validate(self): """Validate the CNF config diff --git a/src/aosm/azext_aosm/_params.py b/src/aosm/azext_aosm/_params.py index ab21d6348a5..2c379a3494a 100644 --- a/src/aosm/azext_aosm/_params.py +++ b/src/aosm/azext_aosm/_params.py @@ -2,7 +2,6 @@ # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. # -------------------------------------------------------------------------------------------- -# pylint: disable=line-too-long from argcomplete.completers import FilesCompleter from azure.cli.core import AzCommandsLoader @@ -44,51 +43,72 @@ def load_arguments(self: AzCommandsLoader, _): options_list=["--definition-file", "-b"], type=file_type, completer=FilesCompleter(allowednames="*.json"), - help="Optional path to a bicep file to publish. Use to override publish of the built definition with an alternative file.", + help=( + "Optional path to a bicep file to publish. Use to override publish of" + " the built definition with an alternative file." + ), ) c.argument( "design_file", options_list=["--design-file", "-b"], type=file_type, completer=FilesCompleter(allowednames="*.bicep"), - help="Optional path to a bicep file to publish. Use to override publish of the built design with an alternative file.", + help=( + "Optional path to a bicep file to publish. Use to override publish of" + " the built design with an alternative file." + ), ) c.argument( "order_params", arg_type=get_three_state_flag(), - help="VNF definition_type only - ignored for CNF." - " Order deploymentParameters schema and configMappings to have the " - "parameters without default values at the top and those with default " - "values at the bottom. Can make it easier to remove those with defaults " - "which you do not want to expose as NFD parameters.", + help=( + "VNF definition_type only - ignored for CNF. Order deploymentParameters" + " schema and configMappings to have the parameters without default" + " values at the top and those with default values at the bottom. Can" + " make it easier to remove those with defaults which you do not want to" + " expose as NFD parameters." + ), ) c.argument( "interactive", options_list=["--interactive", "-i"], arg_type=get_three_state_flag(), - help="Prompt user to choose every parameter to expose as an NFD parameter." - " Those without defaults are automatically included.", + help=( + "Prompt user to choose every parameter to expose as an NFD parameter." + " Those without defaults are automatically included." 
+ ), ) c.argument( "parameters_json_file", options_list=["--parameters-file", "-p"], type=file_type, completer=FilesCompleter(allowednames="*.json"), - help="Optional path to a parameters file for the bicep definition file. Use to override publish of the built definition and config with alternative parameters.", + help=( + "Optional path to a parameters file for the bicep definition file. Use" + " to override publish of the built definition and config with" + " alternative parameters." + ), ) c.argument( "manifest_file", options_list=["--manifest-file", "-m"], type=file_type, completer=FilesCompleter(allowednames="*.json"), - help="Optional path to a bicep file to publish manifests. Use to override publish of the built definition with an alternative file.", + help=( + "Optional path to a bicep file to publish manifests. Use to override" + " publish of the built definition with an alternative file." + ), ) c.argument( "manifest_parameters_json_file", options_list=["--manifest-parameters-file", "-mp"], type=file_type, completer=FilesCompleter(allowednames="*.json"), - help="Optional path to a parameters file for the manifest definition file. Use to override publish of the built definition and config with alternative parameters.", + help=( + "Optional path to a parameters file for the manifest definition file." + " Use to override publish of the built definition and config with" + " alternative parameters." + ), ) c.argument( "skip", arg_type=skip_steps, help="Optional skip steps" diff --git a/src/aosm/azext_aosm/commands.py b/src/aosm/azext_aosm/commands.py index 8fd99d31c23..abc33f8444b 100644 --- a/src/aosm/azext_aosm/commands.py +++ b/src/aosm/azext_aosm/commands.py @@ -3,7 +3,6 @@ # Licensed under the MIT License. See License.txt in the project root for license information. # -------------------------------------------------------------------------------------------- -# pylint: disable=line-too-long from azure.cli.core import AzCommandsLoader from azext_aosm._client_factory import cf_aosm diff --git a/src/aosm/azext_aosm/custom.py b/src/aosm/azext_aosm/custom.py index ff45db84666..bbe7ed00abd 100644 --- a/src/aosm/azext_aosm/custom.py +++ b/src/aosm/azext_aosm/custom.py @@ -16,7 +16,7 @@ ) from knack.log import get_logger -from azext_aosm._client_factory import cf_resources, cf_acr_registries +from azext_aosm._client_factory import cf_acr_registries, cf_resources from azext_aosm._configuration import ( CNFConfiguration, NFConfiguration, @@ -90,7 +90,8 @@ def _get_config_from_file( if not os.path.exists(config_file): raise InvalidArgumentValueError( - f"Config file {config_file} not found. Please specify a valid config file path." + f"Config file {config_file} not found. Please specify a valid config file" + " path." ) with open(config_file, "r", encoding="utf-8") as f: @@ -112,11 +113,13 @@ def _generate_nfd( nfd_generator = CnfNfdGenerator(config, interactive) else: raise CLIInternalError( - "Generate NFD called for unrecognised definition_type. Only VNF and CNF have been implemented." + "Generate NFD called for unrecognised definition_type. Only VNF and CNF" + " have been implemented." ) if nfd_generator.bicep_path: carry_on = input( - f"The folder {os.path.dirname(nfd_generator.bicep_path)} already exists - delete it and continue? (y/n)" + f"The folder {os.path.dirname(nfd_generator.bicep_path)} already exists -" + " delete it and continue? (y/n)" ) if carry_on != "y": raise UnclassifiedUserFault("User aborted! 
") @@ -188,7 +191,8 @@ def publish_definition( ) else: raise ValueError( - f"Definition type must be either 'vnf' or 'cnf'. Definition type {definition_type} is not recognised." + "Definition type must be either 'vnf' or 'cnf'. Definition type" + f" {definition_type} is not recognised." ) @@ -223,8 +227,8 @@ def delete_published_definition( delly.delete_nfd(clean=clean) else: raise ValueError( - "Definition type must be either 'vnf' or 'cnf'. " - f"Definition type {definition_type} is not recognised." + "Definition type must be either 'vnf' or 'cnf'. Definition type" + f" {definition_type} is not recognised." ) @@ -251,7 +255,8 @@ def _generate_config(configuration_type: str, output_file: str = "input.json"): if os.path.exists(output_file): carry_on = input( - f"The file {output_file} already exists - do you want to overwrite it? (y/n)" + f"The file {output_file} already exists - do you want to overwrite it?" + " (y/n)" ) if carry_on != "y": raise UnclassifiedUserFault("User aborted!") @@ -263,7 +268,9 @@ def _generate_config(configuration_type: str, output_file: str = "input.json"): else: prtName = "design" print(f"Empty {prtName} configuration has been written to {output_file}") - logger.info(f"Empty {prtName} configuration has been written to {output_file}") + logger.info( + "Empty %s configuration has been written to %s", prtName, output_file + ) def build_design(cmd, client: HybridNetworkManagementClient, config_file: str): @@ -370,7 +377,8 @@ def _generate_nsd(config: NSConfiguration, api_clients: ApiClients): """Generate a Network Service Design for the given config.""" if os.path.exists(config.build_output_folder_name): carry_on = input( - f"The folder {config.build_output_folder_name} already exists - delete it and continue? (y/n)" + f"The folder {config.build_output_folder_name} already exists - delete it" + " and continue? (y/n)" ) if carry_on != "y": raise UnclassifiedUserFault("User aborted! ") diff --git a/src/aosm/azext_aosm/delete/delete.py b/src/aosm/azext_aosm/delete/delete.py index aeedaaf4543..6894f6f3544 100644 --- a/src/aosm/azext_aosm/delete/delete.py +++ b/src/aosm/azext_aosm/delete/delete.py @@ -41,13 +41,16 @@ def delete_nfd(self, clean: bool = False): if clean: print( - f"Are you sure you want to delete all resources associated with NFD {self.config.nf_name} including the artifact stores and publisher {self.config.publisher_name}?" + "Are you sure you want to delete all resources associated with NFD" + f" {self.config.nf_name} including the artifact stores and publisher" + f" {self.config.publisher_name}?" ) logger.warning( "This command will fail if other NFD versions exist in the NFD group." ) logger.warning( - "Only do this if you are SURE you are not sharing the publisher and artifact stores with other NFDs" + "Only do this if you are SURE you are not sharing the publisher and" + " artifact stores with other NFDs" ) print("There is no undo. Type the publisher name to confirm.") if not input_ack(self.config.publisher_name.lower(), "Confirm delete:"): @@ -55,7 +58,9 @@ def delete_nfd(self, clean: bool = False): return else: print( - f"Are you sure you want to delete the NFD Version {self.config.version} and associated manifests from group {self.config.nfdg_name} and publisher {self.config.publisher_name}?" + "Are you sure you want to delete the NFD Version" + f" {self.config.version} and associated manifests from group" + f" {self.config.nfdg_name} and publisher {self.config.publisher_name}?" ) print("There is no undo. 
Type 'delete' to confirm") if not input_ack("delete", "Confirm delete:"): @@ -85,7 +90,10 @@ def delete_nsd(self): assert isinstance(self.config, NSConfiguration) print( - f"Are you sure you want to delete the NSD Version {self.config.nsd_version}, the associated manifest {self.config.acr_manifest_name} and configuration group schema {self.config.cg_schema_name}?" + "Are you sure you want to delete the NSD Version" + f" {self.config.nsd_version}, the associated manifest" + f" {self.config.acr_manifest_name} and configuration group schema" + f" {self.config.cg_schema_name}?" ) print("There is no undo. Type 'delete' to confirm") if not input_ack("delete", "Confirm delete:"): @@ -97,7 +105,10 @@ def delete_nsd(self): self.delete_config_group_schema() def delete_nfdv(self): - message = f"Delete NFDV {self.config.version} from group {self.config.nfdg_name} and publisher {self.config.publisher_name}" + message = ( + f"Delete NFDV {self.config.version} from group {self.config.nfdg_name} and" + f" publisher {self.config.publisher_name}" + ) logger.debug(message) print(message) try: @@ -111,13 +122,18 @@ def delete_nfdv(self): print("Deleted NFDV.") except Exception: logger.error( - f"Failed to delete NFDV {self.config.version} from group {self.config.nfdg_name}" + "Failed to delete NFDV %s from group %s", + self.config.version, + self.config.nfdg_name, ) raise def delete_nsdv(self): assert isinstance(self.config, NSConfiguration) - message = f"Delete NSDV {self.config.nsd_version} from group {self.config.nsdg_name} and publisher {self.config.publisher_name}" + message = ( + f"Delete NSDV {self.config.nsd_version} from group" + f" {self.config.nsdg_name} and publisher {self.config.publisher_name}" + ) logger.debug(message) print(message) try: @@ -131,7 +147,9 @@ def delete_nsdv(self): print("Deleted NSDV.") except Exception: logger.error( - f"Failed to delete NSDV {self.config.nsd_version} from group {self.config.nsdg_name}" + "Failed to delete NSDV %s from group %s", + self.config.nsd_version, + self.config.nsdg_name, ) raise @@ -154,7 +172,8 @@ def delete_artifact_manifest(self, store_type: str) -> None: from azure.cli.core.azclierror import CLIInternalError raise CLIInternalError( - "Delete artifact manifest called for invalid store type. Valid types are sa and acr." + "Delete artifact manifest called for invalid store type. Valid types" + " are sa and acr." ) message = ( f"Delete Artifact manifest {manifest_name} from artifact store {store_name}" @@ -172,7 +191,9 @@ def delete_artifact_manifest(self, store_type: str) -> None: print("Deleted Artifact Manifest") except Exception: logger.error( - f"Failed to delete Artifact manifest {manifest_name} from artifact store {store_name}" + "Failed to delete Artifact manifest %s from artifact store %s", + manifest_name, + store_name, ) raise @@ -226,7 +247,8 @@ def delete_artifact_store(self, store_type: str) -> None: from azure.cli.core.azclierror import CLIInternalError raise CLIInternalError( - "Delete artifact store called for invalid store type. Valid types are sa and acr." + "Delete artifact store called for invalid store type. Valid types are" + " sa and acr." 
) message = f"Delete Artifact store {store_name}" logger.debug(message) @@ -240,7 +262,7 @@ def delete_artifact_store(self, store_type: str) -> None: poller.result() print("Deleted Artifact Store") except Exception: - logger.error(f"Failed to delete Artifact store {store_name}") + logger.error("Failed to delete Artifact store %s", store_name) raise def delete_publisher(self) -> None: diff --git a/src/aosm/azext_aosm/deploy/artifact.py b/src/aosm/azext_aosm/deploy/artifact.py index 441f119bac3..8d080933abd 100644 --- a/src/aosm/azext_aosm/deploy/artifact.py +++ b/src/aosm/azext_aosm/deploy/artifact.py @@ -3,23 +3,21 @@ # pylint: disable=unidiomatic-typecheck """A module to handle interacting with artifacts.""" +import subprocess from dataclasses import dataclass from typing import Union, List -import subprocess + +from azure.cli.core.commands import LongRunningOperation +from azure.mgmt.containerregistry.models import ImportImageParameters, ImportSource +from azure.storage.blob import BlobClient, BlobType from knack.log import get_logger +from knack.util import CLIError from oras.client import OrasClient from azure.cli.core.commands import LongRunningOperation from azure.mgmt.containerregistry import ContainerRegistryManagementClient -from azure.storage.blob import BlobClient, BlobType -from azure.mgmt.containerregistry.models import ( - ImportImageParameters, - ImportSource, -) - from azext_aosm._configuration import ArtifactConfig, HelmPackageConfig - logger = get_logger(__name__) @@ -86,12 +84,12 @@ def _upload_helm_to_acr(self, artifact_config: HelmPackageConfig) -> None: login_command = ["az", "acr", "login", "--name", registry_name] subprocess.run(login_command, check=True) - logger.debug(f"Uploading {chart_path} to {target_registry}") + logger.debug("Uploading %s to %s", chart_path, target_registry) # helm push "$chart_path" "$target_registry" push_command = ["helm", "push", chart_path, target_registry] subprocess.run(push_command, check=True) - + # If we don't logout from the registry, future Artifact uploads to this ACR # will fail with an UNAUTHORIZED error. There is no az acr logout command, but # it is a wrapper around docker, so a call to docker logout will work. @@ -134,11 +132,12 @@ def _upload_to_storage_account(self, artifact_config: ArtifactConfig) -> None: ) else: raise RuntimeError( - f"{source_blob.blob_name} does not exist in {source_blob.account_name}." + f"{source_blob.blob_name} does not exist in" + f" {source_blob.account_name}." ) + @staticmethod def copy_image( - self, cli_ctx, container_registry_client: ContainerRegistryManagementClient, source_registry_id: str, @@ -182,10 +181,12 @@ def copy_image( logger.info( "Successfully imported %s to %s", source_image, target_registry_name ) - except Exception as error: + except CLIError as error: logger.error( - "Failed to import %s to %s. Check if this image exists in the source " - "registry or is already present in the target registry.", + ( + "Failed to import %s to %s. Check if this image exists in the" + " source registry or is already present in the target registry." 
+ ), source_image, target_registry_name, ) diff --git a/src/aosm/azext_aosm/deploy/artifact_manifest.py b/src/aosm/azext_aosm/deploy/artifact_manifest.py index 168021e8519..55b55fd95e6 100644 --- a/src/aosm/azext_aosm/deploy/artifact_manifest.py +++ b/src/aosm/azext_aosm/deploy/artifact_manifest.py @@ -139,7 +139,9 @@ def _get_artifact_client( # For AOSM to work VHD blobs must have the suffix .vhd if artifact.artifact_name.endswith("-vhd"): - blob_name = f"{artifact.artifact_name[:-4].replace('-', '')}-{artifact.artifact_version}.vhd" + blob_name = ( + f"{artifact.artifact_name[:-4].replace('-', '')}-{artifact.artifact_version}.vhd" + ) else: blob_name = container_name @@ -159,8 +161,7 @@ def _get_blob_url(self, container_name: str, blob_name: str) -> str: for container_credential in self._manifest_credentials["container_credentials"]: if container_credential["container_name"] == container_name: sas_uri = str(container_credential["container_sas_uri"]) - sas_uri_prefix = sas_uri.split("?")[0] # pylint: disable=use-maxsplit-arg - sas_uri_token = sas_uri.split("?")[1] + sas_uri_prefix, sas_uri_token = sas_uri.split("?", maxsplit=1) blob_url = f"{sas_uri_prefix}/{blob_name}?{sas_uri_token}" logger.debug("Blob URL: %s", blob_url) diff --git a/src/aosm/azext_aosm/deploy/deploy_with_arm.py b/src/aosm/azext_aosm/deploy/deploy_with_arm.py index da9eb371ac9..a8aaa945d72 100644 --- a/src/aosm/azext_aosm/deploy/deploy_with_arm.py +++ b/src/aosm/azext_aosm/deploy/deploy_with_arm.py @@ -16,28 +16,27 @@ from knack.log import get_logger from azext_aosm._configuration import ( + CNFConfiguration, NFConfiguration, NSConfiguration, VNFConfiguration, - CNFConfiguration, ) -from azext_aosm.deploy.artifact_manifest import ArtifactManifestOperator from azext_aosm.deploy.artifact import Artifact -from azext_aosm.util.management_clients import ApiClients +from azext_aosm.deploy.artifact_manifest import ArtifactManifestOperator from azext_aosm.deploy.pre_deploy import PreDeployerViaSDK from azext_aosm.util.constants import ( ARTIFACT_UPLOAD, BICEP_PUBLISH, - NF_DEFINITION_BICEP_FILE, - NSD, - NSD_ARTIFACT_MANIFEST_BICEP_FILE, - NSD_DEFINITION_BICEP_FILE, - CNF_DEFINITION_BICEP_TEMPLATE, - CNF_MANIFEST_BICEP_TEMPLATE, CNF, + CNF_DEFINITION_BICEP_TEMPLATE_FILENAME, + CNF_MANIFEST_BICEP_TEMPLATE_FILENAME, + NF_DEFINITION_BICEP_FILENAME, + NSD, + NSD_ARTIFACT_MANIFEST_BICEP_FILENAME, + NSD_BICEP_FILENAME, VNF, - VNF_DEFINITION_BICEP_TEMPLATE, - VNF_MANIFEST_BICEP_TEMPLATE, + VNF_DEFINITION_BICEP_TEMPLATE_FILENAME, + VNF_MANIFEST_BICEP_TEMPLATE_FILENAME, SOURCE_ACR_REGEX, ) from azext_aosm.util.management_clients import ApiClients @@ -69,7 +68,8 @@ def __init__( self.config = config self.pre_deployer = PreDeployerViaSDK(api_clients, self.config) - def read_parameters_from_file(self, parameters_json_file: str) -> Dict[str, Any]: + @staticmethod + def read_parameters_from_file(parameters_json_file: str) -> Dict[str, Any]: """ Read parameters from a file. 
@@ -121,7 +121,7 @@ def deploy_vnfd_from_bicep( # one produced from building the NFDV using this CLI bicep_path = os.path.join( self.config.build_output_folder_name, - VNF_DEFINITION_BICEP_TEMPLATE, + VNF_DEFINITION_BICEP_TEMPLATE_FILENAME, ) if parameters_json_file: @@ -148,10 +148,10 @@ def deploy_vnfd_from_bicep( f"version {self.config.version}" ) message = ( - f"Deploy bicep template for NFD {self.config.nf_name} " - f"version {self.config.version} " - f"into {self.config.publisher_resource_group_name} under publisher " - f"{self.config.publisher_name}" + f"Deploy bicep template for NFD {self.config.nf_name} version" + f" {self.config.version} into" + f" {self.config.publisher_resource_group_name} under publisher" + f" {self.config.publisher_name}" ) print(message) logger.info(message) @@ -252,14 +252,14 @@ def construct_manifest_parameters(self) -> Dict[str, Any]: "vhdVersion": {"value": self.config.vhd.version}, "armTemplateVersion": {"value": self.config.arm_template.version}, } - elif isinstance(self.config, CNFConfiguration): + if isinstance(self.config, CNFConfiguration): return { "location": {"value": self.config.location}, "publisherName": {"value": self.config.publisher_name}, "acrArtifactStoreName": {"value": self.config.acr_artifact_store_name}, "acrManifestName": {"value": self.config.acr_manifest_name}, } - elif isinstance(self.config, NSConfiguration): + if isinstance(self.config, NSConfiguration): return { "location": {"value": self.config.location}, "publisherName": {"value": self.config.publisher_name}, @@ -301,7 +301,7 @@ def deploy_cnfd_from_bicep( # default one produced from building the NFDV using this CLI bicep_path = os.path.join( self.config.build_output_folder_name, - CNF_DEFINITION_BICEP_TEMPLATE, + CNF_DEFINITION_BICEP_TEMPLATE_FILENAME, ) if parameters_json_file: @@ -314,7 +314,7 @@ def deploy_cnfd_from_bicep( parameters = self.construct_cnfd_parameters() logger.debug( - f"Parameters used for CNF definition bicep deployment: {parameters}" + "Parameters used for CNF definition bicep deployment: %s", parameters ) # Create or check required resources @@ -329,10 +329,10 @@ def deploy_cnfd_from_bicep( f"version {self.config.version}" ) message = ( - f"Deploy bicep template for NFD {self.config.nf_name} " - f"version {self.config.version} " - f"into {self.config.publisher_resource_group_name} under publisher " - f"{self.config.publisher_name}" + f"Deploy bicep template for NFD {self.config.nf_name} version" + f" {self.config.version} into" + f" {self.config.publisher_resource_group_name} under publisher" + f" {self.config.publisher_name}" ) print(message) logger.info(message) @@ -356,12 +356,6 @@ def deploy_cnfd_from_bicep( "/" )[-5] # Check whether the source registry has a namespace in the repository path - source_registry_match = re.search( - SOURCE_ACR_REGEX, - self.config.source_registry_id - ) - # Config validation has already checked and raised an error if the regex doesn't - # match source_registry_namespace: str = "" if self.config.source_registry_namespace: source_registry_namespace = f"{self.config.source_registry_namespace}/" @@ -444,7 +438,7 @@ def deploy_nsd_from_bicep( # one produced from building the NSDV using this CLI bicep_path = os.path.join( self.config.build_output_folder_name, - NSD_DEFINITION_BICEP_FILE, + NSD_BICEP_FILENAME, ) if parameters_json_file: @@ -477,7 +471,8 @@ def deploy_nsd_from_bicep( logger.info(message) self.deploy_bicep_template(bicep_path, parameters) print( - f"Deployed NSD {self.config.nsdg_name} version 
{self.config.nsd_version}." + f"Deployed NSD {self.config.nsdg_name} " + f"version {self.config.nsd_version}." ) if skip == ARTIFACT_UPLOAD: print("Skipping artifact upload") @@ -495,7 +490,7 @@ def deploy_nsd_from_bicep( # Convert the NF bicep to ARM arm_template_artifact_json = self.convert_bicep_to_arm( - os.path.join(self.config.build_output_folder_name, NF_DEFINITION_BICEP_FILE) + os.path.join(self.config.build_output_folder_name, NF_DEFINITION_BICEP_FILENAME) ) with open(self.config.arm_template.file_path, "w", encoding="utf-8") as file: @@ -520,11 +515,11 @@ def deploy_manifest_template( if not manifest_bicep_path: if configuration_type == NSD: - file_name = NSD_ARTIFACT_MANIFEST_BICEP_FILE + file_name = NSD_ARTIFACT_MANIFEST_BICEP_FILENAME elif configuration_type == VNF: - file_name = VNF_MANIFEST_BICEP_TEMPLATE + file_name = VNF_MANIFEST_BICEP_TEMPLATE_FILENAME elif configuration_type == CNF: - file_name = CNF_MANIFEST_BICEP_TEMPLATE + file_name = CNF_MANIFEST_BICEP_TEMPLATE_FILENAME manifest_bicep_path = os.path.join( self.config.build_output_folder_name, @@ -618,9 +613,7 @@ def validate_and_deploy_arm_template( :return: Output dictionary from the bicep template. """ # Get current time from the time module and remove all digits after the decimal point - current_time = str(time.time()).split(".")[ - 0 - ] # pylint: disable=use-maxsplit-arg + current_time = str(time.time()).split(".", maxsplit=1)[0] # Add a timestamp to the deployment name to ensure it is unique deployment_name = f"AOSM_CLI_deployment_into_{resource_group}_{current_time}" @@ -642,14 +635,18 @@ def validate_and_deploy_arm_template( if validation_res.error: # Validation failed so don't even try to deploy logger.error( - "Template for resource group %s has failed validation. The message was: %s.\ - See logs for additional details.", + ( + "Template for resource group %s has failed validation. The message" + " was: %s. See logs for additional details." + ), resource_group, validation_res.error.message, ) logger.debug( - "Template for resource group %s failed validation. \ - Full error details: %s", + ( + "Template for resource group %s failed validation." + " Full error details: %s" + ), resource_group, validation_res.error, ) @@ -684,10 +681,10 @@ def validate_and_deploy_arm_template( if depl_props.provisioning_state != "Succeeded": logger.debug("Failed to provision: %s", depl_props) raise RuntimeError( - f"Deploy of template to resource group" + "Deploy of template to resource group" f" {resource_group} proceeded but the provisioning" - f" state returned is {depl_props.provisioning_state}. " - f"\nAborting" + f" state returned is {depl_props.provisioning_state}." + "\nAborting" ) logger.debug( "Provisioning state of deployment %s : %s", @@ -697,7 +694,8 @@ def validate_and_deploy_arm_template( return depl_props.outputs - def convert_bicep_to_arm(self, bicep_template_path: str) -> Any: + @staticmethod + def convert_bicep_to_arm(bicep_template_path: str) -> Any: """ Convert a bicep template into an ARM template. @@ -735,14 +733,16 @@ def convert_bicep_to_arm(self, bicep_template_path: str) -> Any: stderr=subprocess.PIPE, ) logger.debug("az bicep output: %s", str(bicep_output)) - except subprocess.CalledProcessError as e: + except subprocess.CalledProcessError as err: logger.error( - "ARM template compilation failed! See logs for full " - "output. The failing command was %s", - e.cmd, + ( + "ARM template compilation failed! See logs for full " + "output. 
The failing command was %s" + ), + err.cmd, ) - logger.debug("bicep build stdout: %s", e.stdout) - logger.debug("bicep build stderr: %s", e.stderr) + logger.debug("bicep build stdout: %s", err.stdout) + logger.debug("bicep build stderr: %s", err.stderr) raise with open( diff --git a/src/aosm/azext_aosm/deploy/pre_deploy.py b/src/aosm/azext_aosm/deploy/pre_deploy.py index 5446dd5baf0..0c32862d3f0 100644 --- a/src/aosm/azext_aosm/deploy/pre_deploy.py +++ b/src/aosm/azext_aosm/deploy/pre_deploy.py @@ -67,9 +67,10 @@ def ensure_resource_group_exists(self, resource_group_name: str) -> None: ): if isinstance(self.config, NSConfiguration): raise AzCLIError( - f"Resource Group {resource_group_name} does not exist. Please create it before running this command." + f"Resource Group {resource_group_name} does not exist. Please" + " create it before running this command." ) - logger.info(f"RG {resource_group_name} not found. Create it.") + logger.info("RG %s not found. Create it.", resource_group_name) print(f"Creating resource group {resource_group_name}.") rg_params: ResourceGroup = ResourceGroup(location=self.config.location) self.api_clients.resource_client.resource_groups.create_or_update( @@ -106,17 +107,20 @@ def ensure_publisher_exists( resource_group_name, publisher_name ) print( - f"Publisher {publisher.name} exists in resource group {resource_group_name}" + f"Publisher {publisher.name} exists in resource group" + f" {resource_group_name}" ) - except azure_exceptions.ResourceNotFoundError: + except azure_exceptions.ResourceNotFoundError as ex: if isinstance(self.config, NSConfiguration): raise AzCLIError( - f"Publisher {publisher_name} does not exist. Please create it before running this command." - ) + f"Publisher {publisher_name} does not exist. Please create it" + " before running this command." + ) from ex # Create the publisher logger.info("Creating publisher %s if it does not exist", publisher_name) print( - f"Creating publisher {publisher_name} in resource group {resource_group_name}" + f"Creating publisher {publisher_name} in resource group" + f" {resource_group_name}" ) pub = self.api_clients.aosm_client.publishers.begin_create_or_update( resource_group_name=resource_group_name, @@ -196,11 +200,13 @@ def ensure_artifact_store_exists( artifact_store_name=artifact_store_name, ) print( - f"Artifact store {artifact_store_name} exists in resource group {resource_group_name}" + f"Artifact store {artifact_store_name} exists in resource group" + f" {resource_group_name}" ) - except azure_exceptions.ResourceNotFoundError: + except azure_exceptions.ResourceNotFoundError as ex: print( - f"Create Artifact Store {artifact_store_name} of type {artifact_store_type}" + f"Create Artifact Store {artifact_store_name} of type" + f" {artifact_store_type}" ) poller = ( self.api_clients.aosm_client.artifact_stores.begin_create_or_update( @@ -218,15 +224,16 @@ def ensure_artifact_store_exists( arty: ArtifactStore = poller.result() if arty.provisioning_state != ProvisioningState.SUCCEEDED: - logger.debug(f"Failed to provision artifact store: {arty.name}") + logger.debug("Failed to provision artifact store: %s", arty.name) raise RuntimeError( - f"Creation of artifact store proceeded, but the provisioning" + "Creation of artifact store proceeded, but the provisioning" f" state returned is {arty.provisioning_state}. 
" - f"\nAborting" - ) + "\nAborting" + ) from ex logger.debug( - f"Provisioning state of {artifact_store_name}" - f": {arty.provisioning_state}" + "Provisioning state of %s: %s", + artifact_store_name, + arty.provisioning_state, ) def ensure_acr_artifact_store_exists(self) -> None: @@ -296,9 +303,10 @@ def ensure_nfdg_exists( network_function_definition_group_name=nfdg_name, ) print( - f"Network function definition group {nfdg_name} exists in resource group {resource_group_name}" + f"Network function definition group {nfdg_name} exists in resource" + f" group {resource_group_name}" ) - except azure_exceptions.ResourceNotFoundError: + except azure_exceptions.ResourceNotFoundError as ex: print(f"Create Network Function Definition Group {nfdg_name}") poller = self.api_clients.aosm_client.network_function_definition_groups.begin_create_or_update( resource_group_name=resource_group_name, @@ -313,15 +321,16 @@ def ensure_nfdg_exists( if nfdg.provisioning_state != ProvisioningState.SUCCEEDED: logger.debug( - f"Failed to provision Network Function Definition Group: {nfdg.name}" + "Failed to provision Network Function Definition Group: %s", + nfdg.name, ) raise RuntimeError( - f"Creation of Network Function Definition Group proceeded, but the provisioning" - f" state returned is {nfdg.provisioning_state}. " - f"\nAborting" - ) + "Creation of Network Function Definition Group proceeded, but the" + f" provisioning state returned is {nfdg.provisioning_state}." + " \nAborting" + ) from ex logger.debug( - f"Provisioning state of {nfdg_name}" f": {nfdg.provisioning_state}" + "Provisioning state of %s: %s", nfdg_name, nfdg.provisioning_state ) def ensure_config_nfdg_exists( @@ -364,10 +373,10 @@ def does_artifact_manifest_exist( artifact_store_name=store_name, artifact_manifest_name=manifest_name, ) - logger.debug(f"Artifact manifest {manifest_name} exists") + logger.debug("Artifact manifest %s exists", manifest_name) return True except azure_exceptions.ResourceNotFoundError: - logger.debug(f"Artifact manifest {manifest_name} does not exist") + logger.debug("Artifact manifest %s does not exist", manifest_name) return False def do_config_artifact_manifests_exist( @@ -390,12 +399,13 @@ def do_config_artifact_manifests_exist( ) if acr_manny_exists and sa_manny_exists: return True - elif acr_manny_exists or sa_manny_exists: + if acr_manny_exists or sa_manny_exists: raise AzCLIError( - "Only one artifact manifest exists. Cannot proceed. Please delete the NFDV using `az aosm nfd delete` and start the publish again from scratch." + "Only one artifact manifest exists. Cannot proceed. Please delete" + " the NFDV using `az aosm nfd delete` and start the publish again" + " from scratch." 
) - else: - return False + return False return acr_manny_exists diff --git a/src/aosm/azext_aosm/generate_nfd/cnf_nfd_generator.py b/src/aosm/azext_aosm/generate_nfd/cnf_nfd_generator.py index 506a70e04b0..271b0fa2b9e 100644 --- a/src/aosm/azext_aosm/generate_nfd/cnf_nfd_generator.py +++ b/src/aosm/azext_aosm/generate_nfd/cnf_nfd_generator.py @@ -19,20 +19,20 @@ from azext_aosm._configuration import CNFConfiguration, HelmPackageConfig from azext_aosm.generate_nfd.nfd_generator_base import NFDGenerator from azext_aosm.util.constants import ( - CNF_DEFINITION_BICEP_TEMPLATE, - CNF_DEFINITION_JINJA2_SOURCE_TEMPLATE, - CNF_MANIFEST_BICEP_TEMPLATE, - CNF_MANIFEST_JINJA2_SOURCE_TEMPLATE, - CONFIG_MAPPINGS, + CNF_DEFINITION_BICEP_TEMPLATE_FILENAME, + CNF_DEFINITION_JINJA2_SOURCE_TEMPLATE_FILENAME, + CNF_MANIFEST_BICEP_TEMPLATE_FILENAME, + CNF_MANIFEST_JINJA2_SOURCE_TEMPLATE_FILENAME, + CONFIG_MAPPINGS_DIR_NAME, DEPLOYMENT_PARAMETER_MAPPING_REGEX, + DEPLOYMENT_PARAMETERS_FILENAME, + GENERATED_VALUES_MAPPINGS_DIR_NAME, IMAGE_NAME_AND_VERSION_REGEX, IMAGE_PATH_REGEX, - DEPLOYMENT_PARAMETERS, - GENERATED_VALUES_MAPPINGS, - SCHEMA_PREFIX, - SCHEMAS, IMAGE_PULL_SECRETS_START_STRING, IMAGE_START_STRING, + SCHEMA_PREFIX, + SCHEMAS_DIR_NAME, ) from azext_aosm.util.utils import input_ack @@ -58,17 +58,16 @@ def __init__(self, config: CNFConfiguration, interactive: bool = False): mapping file from the values.yaml in the helm package, and also requires the mapping file in config to be blank. """ - super(NFDGenerator, self).__init__() self.config = config self.nfd_jinja2_template_path = os.path.join( os.path.dirname(__file__), "templates", - CNF_DEFINITION_JINJA2_SOURCE_TEMPLATE, + CNF_DEFINITION_JINJA2_SOURCE_TEMPLATE_FILENAME, ) self.manifest_jinja2_template_path = os.path.join( os.path.dirname(__file__), "templates", - CNF_MANIFEST_JINJA2_SOURCE_TEMPLATE, + CNF_MANIFEST_JINJA2_SOURCE_TEMPLATE_FILENAME, ) self.output_folder_name = self.config.build_output_folder_name @@ -77,7 +76,7 @@ def __init__(self, config: CNFConfiguration, interactive: bool = False): self.deployment_parameter_schema = SCHEMA_PREFIX self._bicep_path = os.path.join( - self.output_folder_name, CNF_DEFINITION_BICEP_TEMPLATE + self.output_folder_name, CNF_DEFINITION_BICEP_TEMPLATE_FILENAME ) self.interactive = interactive self._tmp_folder_name = "" @@ -179,8 +178,8 @@ def _extract_chart(self, path: str) -> None: else: raise InvalidTemplateError( - f"ERROR: The helm package '{path}' is not a .tgz, .tar or .tar.gz file.\ - Please fix this and run the command again." + f"ERROR: The helm package '{path}' is not a .tgz, .tar or .tar.gz file." + " Please fix this and run the command again." 
) def _generate_chart_value_mappings(self, helm_package: HelmPackageConfig) -> None: @@ -203,11 +202,11 @@ def _generate_chart_value_mappings(self, helm_package: HelmPackageConfig) -> Non ) # Write the mapping to a file - folder_name = os.path.join(self._tmp_folder_name, GENERATED_VALUES_MAPPINGS) + folder_name = os.path.join(self._tmp_folder_name, GENERATED_VALUES_MAPPINGS_DIR_NAME) os.makedirs(folder_name, exist_ok=True) mapping_filepath = os.path.join( self._tmp_folder_name, - GENERATED_VALUES_MAPPINGS, + GENERATED_VALUES_MAPPINGS_DIR_NAME, f"{helm_package.name}-generated-mapping.yaml", ) with open(mapping_filepath, "w", encoding="UTF-8") as mapping_file: @@ -254,7 +253,7 @@ def write_manifest_bicep_file(self) -> None: artifacts=self.artifacts, ) - path = os.path.join(self._tmp_folder_name, CNF_MANIFEST_BICEP_TEMPLATE) + path = os.path.join(self._tmp_folder_name, CNF_MANIFEST_BICEP_TEMPLATE_FILENAME) with open(path, "w", encoding="utf-8") as f: f.write(bicep_contents) @@ -269,11 +268,11 @@ def write_nfd_bicep_file(self) -> None: ) bicep_contents: str = template.render( - deployParametersPath=os.path.join(SCHEMAS, DEPLOYMENT_PARAMETERS), + deployParametersPath=os.path.join(SCHEMAS_DIR_NAME, DEPLOYMENT_PARAMETERS_FILENAME), nf_application_configurations=self.nf_application_configurations, ) - path = os.path.join(self._tmp_folder_name, CNF_DEFINITION_BICEP_TEMPLATE) + path = os.path.join(self._tmp_folder_name, CNF_DEFINITION_BICEP_TEMPLATE_FILENAME) with open(path, "w", encoding="utf-8") as f: f.write(bicep_contents) @@ -284,7 +283,7 @@ def write_schema_to_file(self) -> None: logger.debug("Create deploymentParameters.json") - full_schema = os.path.join(self._tmp_folder_name, DEPLOYMENT_PARAMETERS) + full_schema = os.path.join(self._tmp_folder_name, DEPLOYMENT_PARAMETERS_FILENAME) with open(full_schema, "w", encoding="UTF-8") as f: json.dump(self.deployment_parameter_schema, f, indent=4) @@ -296,16 +295,16 @@ def copy_to_output_folder(self) -> None: logger.info("Create NFD bicep %s", self.output_folder_name) os.mkdir(self.output_folder_name) - os.mkdir(os.path.join(self.output_folder_name, SCHEMAS)) + os.mkdir(os.path.join(self.output_folder_name, SCHEMAS_DIR_NAME)) # Copy the nfd and the manifest bicep files to the output folder tmp_nfd_bicep_path = os.path.join( - self._tmp_folder_name, CNF_DEFINITION_BICEP_TEMPLATE + self._tmp_folder_name, CNF_DEFINITION_BICEP_TEMPLATE_FILENAME ) shutil.copy(tmp_nfd_bicep_path, self.output_folder_name) tmp_manifest_bicep_path = os.path.join( - self._tmp_folder_name, CNF_MANIFEST_BICEP_TEMPLATE + self._tmp_folder_name, CNF_MANIFEST_BICEP_TEMPLATE_FILENAME ) shutil.copy(tmp_manifest_bicep_path, self.output_folder_name) @@ -313,21 +312,21 @@ def copy_to_output_folder(self) -> None: # the output directory so that the user can edit them and re-run the build if # required if os.path.exists( - os.path.join(self._tmp_folder_name, GENERATED_VALUES_MAPPINGS) + os.path.join(self._tmp_folder_name, GENERATED_VALUES_MAPPINGS_DIR_NAME) ): generated_mappings_path = os.path.join( - self.output_folder_name, GENERATED_VALUES_MAPPINGS + self.output_folder_name, GENERATED_VALUES_MAPPINGS_DIR_NAME ) shutil.copytree( - os.path.join(self._tmp_folder_name, GENERATED_VALUES_MAPPINGS), + os.path.join(self._tmp_folder_name, GENERATED_VALUES_MAPPINGS_DIR_NAME), generated_mappings_path, ) # Copy the JSON config mappings and deploymentParameters schema that are used # for the NFD to the output folder - tmp_config_mappings_path = os.path.join(self._tmp_folder_name, CONFIG_MAPPINGS) + 
tmp_config_mappings_path = os.path.join(self._tmp_folder_name, CONFIG_MAPPINGS_DIR_NAME) output_config_mappings_path = os.path.join( - self.output_folder_name, CONFIG_MAPPINGS + self.output_folder_name, CONFIG_MAPPINGS_DIR_NAME ) shutil.copytree( tmp_config_mappings_path, @@ -335,9 +334,9 @@ def copy_to_output_folder(self) -> None: dirs_exist_ok=True, ) - tmp_schema_path = os.path.join(self._tmp_folder_name, DEPLOYMENT_PARAMETERS) + tmp_schema_path = os.path.join(self._tmp_folder_name, DEPLOYMENT_PARAMETERS_FILENAME) output_schema_path = os.path.join( - self.output_folder_name, SCHEMAS, DEPLOYMENT_PARAMETERS + self.output_folder_name, SCHEMAS_DIR_NAME, DEPLOYMENT_PARAMETERS_FILENAME ) shutil.copy( tmp_schema_path, @@ -368,7 +367,8 @@ def generate_nf_application_config( "valueMappingsPath": self.jsonify_value_mappings(helm_package), } - def _find_yaml_files(self, directory) -> Iterator[str]: + @staticmethod + def _find_yaml_files(directory) -> Iterator[str]: """ Find all yaml files in given directory. @@ -443,7 +443,7 @@ def get_artifact_list( """ Get the list of artifacts for the chart. - :param helm_package: The helm package config. + :param helm_package: The helm package config. :param image_line_matches: The list of image line matches. """ artifact_list = [] @@ -481,14 +481,14 @@ def get_chart_mapping_schema( ) if not os.path.exists(mappings_path): raise InvalidTemplateError( - f"ERROR: The helm package '{helm_package.name}' does not have a valid values mappings file. \ - The file at '{helm_package.path_to_mappings}' does not exist.\n\ - Please fix this and run the command again." + f"ERROR: The helm package '{helm_package.name}' does not have a valid values" + " mappings file. The file at '{helm_package.path_to_mappings}' does not exist." + "\nPlease fix this and run the command again." ) if not os.path.exists(values_schema): raise InvalidTemplateError( - f"ERROR: The helm package '{helm_package.name}' is missing values.schema.json.\n\ - Please fix this and run the command again." + f"ERROR: The helm package '{helm_package.name}' is missing values.schema.json." + "\nPlease fix this and run the command again." ) with open(mappings_path, "r", encoding="utf-8") as stream: @@ -505,16 +505,16 @@ def get_chart_mapping_schema( new_schema = self.search_schema(deploy_params_dict, schema_data) except KeyError as e: raise InvalidTemplateError( - "ERROR: There is a problem with your schema or " - f"values for the helm package '{helm_package.name}'. \ - Please fix this and run the command again." + "ERROR: There is a problem with your schema or values for the helm" + f" package '{helm_package.name}'." + "\nPlease fix this and run the command again." 
) from e logger.debug("Generated chart mapping schema for %s", helm_package.name) return new_schema + @staticmethod def traverse_dict( - self, dict_to_search: Dict[Any, Any], target_regex: str ) -> Dict[str, List[str]]: @@ -539,21 +539,13 @@ def traverse_dict( # If the value is a dictionary if isinstance(v, dict): # Add the dictionary to the stack with the path - stack.append( - (v, path + [k]) - ) + stack.append((v, path + [k])) # If the value is a string + matches target regex - elif isinstance(v, str) and re.search( - target_regex, v - ): + elif isinstance(v, str) and re.search(target_regex, v): # Take the match i.e, foo from {deployParameter.foo} - match = re.search( - target_regex, v - ) + match = re.search(target_regex, v) # Add it to the result dictionary with its path as the value - result[match.group(1)] = path + [ - k - ] + result[match.group(1)] = path + [k] elif isinstance(v, list): logger.debug("Found a list %s", v) for i in v: @@ -575,8 +567,8 @@ def traverse_dict( ) return result + @staticmethod def search_schema( - self, deployParams_paths: Dict[str, List[str]], full_schema ) -> Dict[str, Dict[str, str]]: @@ -709,8 +701,8 @@ def get_chart_name_and_version( if not os.path.exists(chart): raise InvalidTemplateError( - f"There is no Chart.yaml file in the helm package '{helm_package.name}'. \ - Please fix this and run the command again." + f"There is no Chart.yaml file in the helm package '{helm_package.name}'. " + "\nPlease fix this and run the command again." ) with open(chart, "r", encoding="utf-8") as f: @@ -720,9 +712,9 @@ def get_chart_name_and_version( chart_version = data["version"] else: raise FileOperationError( - "A name or version is missing from Chart.yaml in the " - f"helm package '{helm_package.name}'. " - "Please fix this and run the command again." + "A name or version is missing from Chart.yaml in the helm package" + f" '{helm_package.name}'." + "\nPlease fix this and run the command again." ) return (chart_name, chart_version) @@ -731,7 +723,7 @@ def jsonify_value_mappings(self, helm_package: HelmPackageConfig) -> str: """Yaml->JSON values mapping file, then return path to it.""" mappings_yaml = helm_package.path_to_mappings - mappings_folder_path = os.path.join(self._tmp_folder_name, CONFIG_MAPPINGS) + mappings_folder_path = os.path.join(self._tmp_folder_name, CONFIG_MAPPINGS_DIR_NAME) mappings_filename = f"{helm_package.name}-mappings.json" if not os.path.exists(mappings_folder_path): @@ -746,4 +738,4 @@ def jsonify_value_mappings(self, helm_package: HelmPackageConfig) -> str: json.dump(data, file, indent=4) logger.debug("Generated parameter mappings for %s", helm_package.name) - return os.path.join(CONFIG_MAPPINGS, mappings_filename) + return os.path.join(CONFIG_MAPPINGS_DIR_NAME, mappings_filename) diff --git a/src/aosm/azext_aosm/generate_nfd/nfd_generator_base.py b/src/aosm/azext_aosm/generate_nfd/nfd_generator_base.py index 3072f62394e..a57604f3009 100644 --- a/src/aosm/azext_aosm/generate_nfd/nfd_generator_base.py +++ b/src/aosm/azext_aosm/generate_nfd/nfd_generator_base.py @@ -3,24 +3,16 @@ # License. See License.txt in the project root for license information. 
# -------------------------------------------------------------------------------------- """Contains a base class for generating NFDs.""" +from abc import ABC from knack.log import get_logger logger = get_logger(__name__) -class NFDGenerator: +class NFDGenerator(ABC): """A class for generating an NFD from a config file.""" # pylint: disable=too-few-public-methods - def __init__( - self, - ) -> None: - """ - Superclass for NFD generators. - - The sub-classes do the actual work - """ - def generate_nfd(self) -> None: """No-op on base class.""" - logger.error("Generate NFD called on base class. No-op") + raise NotImplementedError diff --git a/src/aosm/azext_aosm/generate_nfd/vnf_nfd_generator.py b/src/aosm/azext_aosm/generate_nfd/vnf_nfd_generator.py index e9581a6b9a1..ec2d44a1214 100644 --- a/src/aosm/azext_aosm/generate_nfd/vnf_nfd_generator.py +++ b/src/aosm/azext_aosm/generate_nfd/vnf_nfd_generator.py @@ -16,16 +16,16 @@ from azext_aosm._configuration import VNFConfiguration from azext_aosm.generate_nfd.nfd_generator_base import NFDGenerator from azext_aosm.util.constants import ( - CONFIG_MAPPINGS, - DEPLOYMENT_PARAMETERS, - OPTIONAL_DEPLOYMENT_PARAMETERS_FILE, + CONFIG_MAPPINGS_DIR_NAME, + DEPLOYMENT_PARAMETERS_FILENAME, + OPTIONAL_DEPLOYMENT_PARAMETERS_FILENAME, OPTIONAL_DEPLOYMENT_PARAMETERS_HEADING, SCHEMA_PREFIX, - SCHEMAS, - TEMPLATE_PARAMETERS, - VHD_PARAMETERS, - VNF_DEFINITION_BICEP_TEMPLATE, - VNF_MANIFEST_BICEP_TEMPLATE, + SCHEMAS_DIR_NAME, + TEMPLATE_PARAMETERS_FILENAME, + VHD_PARAMETERS_FILENAME, + VNF_DEFINITION_BICEP_TEMPLATE_FILENAME, + VNF_MANIFEST_BICEP_TEMPLATE_FILENAME, ) from azext_aosm.util.utils import input_ack @@ -61,10 +61,9 @@ class VnfNfdGenerator(NFDGenerator): """ def __init__(self, config: VNFConfiguration, order_params: bool, interactive: bool): - super(NFDGenerator, self).__init__() self.config = config - self.bicep_template_name = VNF_DEFINITION_BICEP_TEMPLATE - self.manifest_template_name = VNF_MANIFEST_BICEP_TEMPLATE + self.bicep_template_name = VNF_DEFINITION_BICEP_TEMPLATE_FILENAME + self.manifest_template_name = VNF_MANIFEST_BICEP_TEMPLATE_FILENAME self.arm_template_path = self.config.arm_template.file_path self.output_folder_name = self.config.build_output_folder_name @@ -154,11 +153,11 @@ def vm_parameters_ordered(self) -> Dict[str, Any]: def create_parameter_files(self) -> None: """Create the Deployment and Template json parameter files.""" - schemas_folder_path = os.path.join(self.tmp_folder_name, SCHEMAS) + schemas_folder_path = os.path.join(self.tmp_folder_name, SCHEMAS_DIR_NAME) os.mkdir(schemas_folder_path) self.write_deployment_parameters(schemas_folder_path) - mappings_folder_path = os.path.join(self.tmp_folder_name, CONFIG_MAPPINGS) + mappings_folder_path = os.path.join(self.tmp_folder_name, CONFIG_MAPPINGS_DIR_NAME) os.mkdir(mappings_folder_path) self.write_template_parameters(mappings_folder_path) self.write_vhd_parameters(mappings_folder_path) @@ -214,7 +213,7 @@ def write_deployment_parameters(self, folder_path: str) -> None: for key in vm_parameters_to_exclude: self.vm_parameters.pop(key, None) - deployment_parameters_path = os.path.join(folder_path, DEPLOYMENT_PARAMETERS) + deployment_parameters_path = os.path.join(folder_path, DEPLOYMENT_PARAMETERS_FILENAME) # Heading for the deployParameters schema deploy_parameters_full: Dict[str, Any] = SCHEMA_PREFIX @@ -234,7 +233,7 @@ def write_deployment_parameters(self, folder_path: str) -> None: if not self.interactive: if nfd_parameters_with_default: optional_deployment_parameters_path = 
os.path.join( - folder_path, OPTIONAL_DEPLOYMENT_PARAMETERS_FILE + folder_path, OPTIONAL_DEPLOYMENT_PARAMETERS_FILENAME ) with open( optional_deployment_parameters_path, "w", encoding="utf-8" @@ -243,7 +242,7 @@ def write_deployment_parameters(self, folder_path: str) -> None: _file.write(json.dumps(nfd_parameters_with_default, indent=4)) print( "Optional ARM parameters detected. Created " - f"{OPTIONAL_DEPLOYMENT_PARAMETERS_FILE} to help you choose which " + f"{OPTIONAL_DEPLOYMENT_PARAMETERS_FILENAME} to help you choose which " "to expose." ) @@ -253,7 +252,7 @@ def write_template_parameters(self, folder_path: str) -> None: :param folder_path: The folder to put this file in. """ - logger.debug("Create %s", TEMPLATE_PARAMETERS) + logger.debug("Create %s", TEMPLATE_PARAMETERS_FILENAME) vm_parameters = ( self.vm_parameters_ordered if self.order_params else self.vm_parameters ) @@ -267,7 +266,7 @@ def write_template_parameters(self, folder_path: str) -> None: template_parameters[key] = f"{{deployParameters.{key}}}" - template_parameters_path = os.path.join(folder_path, TEMPLATE_PARAMETERS) + template_parameters_path = os.path.join(folder_path, TEMPLATE_PARAMETERS_FILENAME) with open(template_parameters_path, "w", encoding="utf-8") as _file: _file.write(json.dumps(template_parameters, indent=4)) @@ -294,7 +293,7 @@ def write_vhd_parameters(self, folder_path: str) -> None: "azureDeployLocation": azureDeployLocation, } - vhd_parameters_path = os.path.join(folder_path, VHD_PARAMETERS) + vhd_parameters_path = os.path.join(folder_path, VHD_PARAMETERS_FILENAME) with open(vhd_parameters_path, "w", encoding="utf-8") as _file: _file.write(json.dumps(vhd_parameters, indent=4)) diff --git a/src/aosm/azext_aosm/generate_nsd/nsd_generator.py b/src/aosm/azext_aosm/generate_nsd/nsd_generator.py index 63f723d7254..d84c1693103 100644 --- a/src/aosm/azext_aosm/generate_nsd/nsd_generator.py +++ b/src/aosm/azext_aosm/generate_nsd/nsd_generator.py @@ -13,25 +13,24 @@ from jinja2 import Template from knack.log import get_logger -from azext_aosm.vendored_sdks.models import NFVIType from azext_aosm._configuration import NSConfiguration from azext_aosm.util.constants import ( - CONFIG_MAPPINGS, - NF_DEFINITION_BICEP_FILE, + CNF, + CONFIG_MAPPINGS_DIR_NAME, + NF_DEFINITION_BICEP_FILENAME, NF_TEMPLATE_JINJA2_SOURCE_TEMPLATE, - NSD_ARTIFACT_MANIFEST_BICEP_FILE, - NSD_ARTIFACT_MANIFEST_SOURCE_TEMPLATE, - NSD_CONFIG_MAPPING_FILE, - NSD_DEFINITION_BICEP_FILE, + NSD_ARTIFACT_MANIFEST_BICEP_FILENAME, + NSD_ARTIFACT_MANIFEST_SOURCE_TEMPLATE_FILENAME, + NSD_CONFIG_MAPPING_FILENAME, + NSD_BICEP_FILENAME, NSD_DEFINITION_JINJA2_SOURCE_TEMPLATE, - SCHEMAS, - TEMPLATES, - CNF, + SCHEMAS_DIR_NAME, + TEMPLATES_DIR_NAME, VNF, ) from azext_aosm.util.management_clients import ApiClients -from azext_aosm.vendored_sdks.models import NetworkFunctionDefinitionVersion +from azext_aosm.vendored_sdks.models import NetworkFunctionDefinitionVersion, NFVIType logger = get_logger(__name__) @@ -62,10 +61,10 @@ def __init__(self, api_clients: ApiClients, config: NSConfiguration): self.config = config self.nsd_bicep_template_name = NSD_DEFINITION_JINJA2_SOURCE_TEMPLATE self.nf_bicep_template_name = NF_TEMPLATE_JINJA2_SOURCE_TEMPLATE - self.nsd_bicep_output_name = NSD_DEFINITION_BICEP_FILE - self.nfdv_parameter_name = \ + self.nsd_bicep_output_name = NSD_BICEP_FILENAME + self.nfdv_parameter_name = ( f"{self.config.network_function_definition_group_name.replace('-', '_')}_nfd_version" - + ) self.build_folder_name = self.config.build_output_folder_name 
nfdv = self._get_nfdv(config, api_clients) print("Finding the deploy parameters of the NFDV resource") @@ -73,7 +72,9 @@ def __init__(self, api_clients: ApiClients, config: NSConfiguration): raise NotImplementedError( "NFDV has no deploy parameters, cannot generate NSD." ) - self.deploy_parameters: Optional[Dict[str, Any]] = json.loads(nfdv.deploy_parameters) + self.deploy_parameters: Optional[Dict[str, Any]] = json.loads( + nfdv.deploy_parameters + ) def _get_nfdv( self, config: NSConfiguration, api_clients @@ -130,12 +131,14 @@ def config_group_schema_dict(self) -> Dict[str, Any]: # Add in the NFDV version as a parameter. description_string = ( f"The version of the {self.config.network_function_definition_group_name} " - f"NFD to use. This version must be compatible with (have the same " - f"parameters exposed as) " + "NFD to use. This version must be compatible with (have the same " + "parameters exposed as) " f"{self.config.network_function_definition_version_name}." ) - cgs_dict["properties"][self.nfdv_parameter_name] = \ - {"type": "string", "description": description_string} + cgs_dict["properties"][self.nfdv_parameter_name] = { + "type": "string", + "description": description_string, + } cgs_dict["required"].append(self.nfdv_parameter_name) managed_identity_description_string = ( @@ -145,8 +148,10 @@ def config_group_schema_dict(self) -> Dict[str, Any]: "userAssignedIdentities/{identityName}. " "If you wish to use a system assigned identity, set this to a blank string." ) - cgs_dict["properties"]["managedIdentity"] = \ - {"type": "string", "description": managed_identity_description_string} + cgs_dict["properties"]["managedIdentity"] = { + "type": "string", + "description": managed_identity_description_string, + } cgs_dict["required"].append("managedIdentity") if self.config.network_function_type == CNF: @@ -165,11 +170,13 @@ def config_group_schema_dict(self) -> Dict[str, Any]: def create_config_group_schema_files(self) -> None: """Create the Schema and configMappings json files.""" - temp_schemas_folder_path = os.path.join(self.tmp_folder_name, SCHEMAS) + temp_schemas_folder_path = os.path.join(self.tmp_folder_name, SCHEMAS_DIR_NAME) os.mkdir(temp_schemas_folder_path) self.write_schema(temp_schemas_folder_path) - temp_mappings_folder_path = os.path.join(self.tmp_folder_name, CONFIG_MAPPINGS) + temp_mappings_folder_path = os.path.join( + self.tmp_folder_name, CONFIG_MAPPINGS_DIR_NAME + ) os.mkdir(temp_mappings_folder_path) self.write_config_mappings(temp_mappings_folder_path) @@ -179,14 +186,14 @@ def write_schema(self, folder_path: str) -> None: :param folder_path: The folder to put this file in. 
""" - logger.debug(f"Create {self.config.cg_schema_name}.json") + logger.debug("Create %s.json", self.config.cg_schema_name) schema_path = os.path.join(folder_path, f"{self.config.cg_schema_name}.json") with open(schema_path, "w") as _file: _file.write(json.dumps(self.config_group_schema_dict, indent=4)) - logger.debug(f"{schema_path} created") + logger.debug("%s created", schema_path) def write_config_mappings(self, folder_path: str) -> None: """ @@ -202,12 +209,12 @@ def write_config_mappings(self, folder_path: str) -> None: for key in deploy_properties } - config_mappings_path = os.path.join(folder_path, NSD_CONFIG_MAPPING_FILE) + config_mappings_path = os.path.join(folder_path, NSD_CONFIG_MAPPING_FILENAME) with open(config_mappings_path, "w") as _file: _file.write(json.dumps(config_mappings, indent=4)) - logger.debug(f"{config_mappings_path} created") + logger.debug("%s created", config_mappings_path) def write_nf_bicep(self) -> None: """Write out the Network Function bicep file.""" @@ -239,22 +246,30 @@ def write_nf_bicep(self) -> None: self.generate_bicep( self.nf_bicep_template_name, - NF_DEFINITION_BICEP_FILE, + NF_DEFINITION_BICEP_FILENAME, { "bicep_params": bicep_params, "deploymentValues": bicep_deploymentValues, "network_function_name": self.config.network_function_name, "publisher_name": self.config.publisher_name, - "network_function_definition_group_name": self.config.network_function_definition_group_name, - "network_function_definition_version_parameter": self.nfdv_parameter_name, - "network_function_definition_offering_location": self.config.network_function_definition_offering_location, + "network_function_definition_group_name": ( + self.config.network_function_definition_group_name + ), + "network_function_definition_version_parameter": ( + self.nfdv_parameter_name + ), + "network_function_definition_offering_location": ( + self.config.network_function_definition_offering_location + ), "location": self.config.location, # Ideally we would use the network_function_type from reading the actual # NF, as we do for deployParameters, but the SDK currently doesn't # support this and needs to be rebuilt to do so. 
- "nfvi_type": NFVIType.AZURE_CORE.value - if self.config.network_function_type == VNF - else NFVIType.AZURE_ARC_KUBERNETES.value, + "nfvi_type": ( + NFVIType.AZURE_CORE.value + if self.config.network_function_type == VNF + else NFVIType.AZURE_ARC_KUBERNETES.value + ), "CNF": True if self.config.network_function_type == CNF else False, }, ) @@ -279,7 +294,9 @@ def write_nsd_manifest(self) -> None: logger.debug("Create NSD manifest") self.generate_bicep( - NSD_ARTIFACT_MANIFEST_SOURCE_TEMPLATE, NSD_ARTIFACT_MANIFEST_BICEP_FILE, {} + NSD_ARTIFACT_MANIFEST_SOURCE_TEMPLATE_FILENAME, + NSD_ARTIFACT_MANIFEST_BICEP_FILENAME, + {}, ) def generate_bicep(self, @@ -296,7 +313,7 @@ def generate_bicep(self, code_dir = os.path.dirname(__file__) - bicep_template_path = os.path.join(code_dir, TEMPLATES, template_name) + bicep_template_path = os.path.join(code_dir, TEMPLATES_DIR_NAME, template_name) with open(bicep_template_path, "r") as file: bicep_contents = file.read() @@ -313,7 +330,6 @@ def generate_bicep(self, def copy_to_output_folder(self) -> None: """Copy the bicep templates, config mappings and schema into the build output folder.""" - code_dir = os.path.dirname(__file__) logger.info("Create NSD bicep %s", self.build_folder_name) os.mkdir(self.build_folder_name) diff --git a/src/aosm/azext_aosm/util/constants.py b/src/aosm/azext_aosm/util/constants.py index 9e9d7492c2e..072940815b2 100644 --- a/src/aosm/azext_aosm/util/constants.py +++ b/src/aosm/azext_aosm/util/constants.py @@ -15,42 +15,43 @@ ARTIFACT_UPLOAD = "artifact-upload" # Names of files used in the repo -NSD_DEFINITION_JINJA2_SOURCE_TEMPLATE = "nsd_template.bicep.j2" -NSD_DEFINITION_BICEP_FILE = "nsd_definition.bicep" + NF_TEMPLATE_JINJA2_SOURCE_TEMPLATE = "nf_template.bicep.j2" -NF_DEFINITION_BICEP_FILE = "nf_definition.bicep" -NF_DEFINITION_JSON_FILE = "nf_definition.json" -NSD_DEFINITION_OUTPUT_BICEP_PREFIX = "nsd-bicep-templates" -NSD_ARTIFACT_MANIFEST_BICEP_FILE = "artifact_manifest.bicep" -NSD_ARTIFACT_MANIFEST_JSON_FILE = "artifact_manifest.json" -DEFINITION_OUTPUT_BICEP_PREFIX = "nfd-bicep-" -NSD_CONFIG_MAPPING_FILE = "configMappings.json" -NSD_ARTIFACT_MANIFEST_SOURCE_TEMPLATE = "artifact_manifest_template.bicep" +NF_DEFINITION_BICEP_FILENAME = "nf_definition.bicep" +NF_DEFINITION_JSON_FILENAME = "nf_definition.json" +NF_DEFINITION_OUTPUT_BICEP_PREFIX = "nfd-bicep-" +NSD_DEFINITION_JINJA2_SOURCE_TEMPLATE = "nsd_template.bicep.j2" +NSD_BICEP_FILENAME = "nsd_definition.bicep" +NSD_OUTPUT_BICEP_PREFIX = "nsd-bicep-templates" +NSD_ARTIFACT_MANIFEST_BICEP_FILENAME = "artifact_manifest.bicep" +NSD_ARTIFACT_MANIFEST_JSON_FILENAME = "artifact_manifest.json" +NSD_CONFIG_MAPPING_FILENAME = "configMappings.json" +NSD_ARTIFACT_MANIFEST_SOURCE_TEMPLATE_FILENAME = "artifact_manifest_template.bicep" -VNF_DEFINITION_BICEP_TEMPLATE = "vnfdefinition.bicep" -VNF_MANIFEST_BICEP_TEMPLATE = "vnfartifactmanifests.bicep" +VNF_DEFINITION_BICEP_TEMPLATE_FILENAME = "vnfdefinition.bicep" +VNF_MANIFEST_BICEP_TEMPLATE_FILENAME = "vnfartifactmanifests.bicep" -CNF_DEFINITION_JINJA2_SOURCE_TEMPLATE = "cnfdefinition.bicep.j2" -CNF_MANIFEST_JINJA2_SOURCE_TEMPLATE = "cnfartifactmanifest.bicep.j2" -CNF_DEFINITION_BICEP_TEMPLATE = "cnfdefinition.bicep" -CNF_MANIFEST_BICEP_TEMPLATE = "cnfartifactmanifest.bicep" +CNF_DEFINITION_JINJA2_SOURCE_TEMPLATE_FILENAME = "cnfdefinition.bicep.j2" +CNF_MANIFEST_JINJA2_SOURCE_TEMPLATE_FILENAME = "cnfartifactmanifest.bicep.j2" +CNF_DEFINITION_BICEP_TEMPLATE_FILENAME = "cnfdefinition.bicep" +CNF_MANIFEST_BICEP_TEMPLATE_FILENAME = 
"cnfartifactmanifest.bicep" -# Names of folder used in the repo -CONFIG_MAPPINGS = "configMappings" -SCHEMAS = "schemas" -TEMPLATES = "templates" -GENERATED_VALUES_MAPPINGS = "generatedValuesMappings" +# Names of directories used in the repo +CONFIG_MAPPINGS_DIR_NAME = "configMappings" +SCHEMAS_DIR_NAME = "schemas" +TEMPLATES_DIR_NAME = "templates" +GENERATED_VALUES_MAPPINGS_DIR_NAME = "generatedValuesMappings" -# Names of files when building NFDs/NSDs -DEPLOYMENT_PARAMETERS = "deploymentParameters.json" -OPTIONAL_DEPLOYMENT_PARAMETERS_FILE = "optionalDeploymentParameters.txt" -TEMPLATE_PARAMETERS = "templateParameters.json" -VHD_PARAMETERS = "vhdParameters.json" +# Items used when building NFDs/NSDs +DEPLOYMENT_PARAMETERS_FILENAME = "deploymentParameters.json" +OPTIONAL_DEPLOYMENT_PARAMETERS_FILENAME = "optionalDeploymentParameters.txt" +TEMPLATE_PARAMETERS_FILENAME = "templateParameters.json" +VHD_PARAMETERS_FILENAME = "vhdParameters.json" OPTIONAL_DEPLOYMENT_PARAMETERS_HEADING = ( "# The following parameters are optional as they have default values.\n" "# If you do not wish to expose them in the NFD, find and remove them from both\n" - f"# {DEPLOYMENT_PARAMETERS} and {TEMPLATE_PARAMETERS} (and {VHD_PARAMETERS} if\n" + f"# {DEPLOYMENT_PARAMETERS_FILENAME} and {TEMPLATE_PARAMETERS_FILENAME} (and {VHD_PARAMETERS_FILENAME} if\n" "they are there)\n" "# You can re-run the build command with the --order-params flag to order those\n" "# files with the optional parameters at the end of the file, and with the \n" @@ -58,7 +59,6 @@ ) # Deployment Schema - SCHEMA_PREFIX = { "$schema": "https://json-schema.org/draft-07/schema#", "title": "DeployParametersSchema", @@ -71,7 +71,7 @@ IMAGE_START_STRING = "image:" IMAGE_PATH_REGEX = r".Values\.([^\s})]*)" -# To match the image name and version if imagePullSecrets: is present in the yaml file +# To match the image name and version if 'imagePullSecrets:' is present in the yaml file IMAGE_PULL_SECRETS_START_STRING = "imagePullSecrets:" IMAGE_NAME_AND_VERSION_REGEX = r"\/([^\s]*):([^\s)\"}]*)" diff --git a/src/aosm/azext_aosm/util/management_clients.py b/src/aosm/azext_aosm/util/management_clients.py index 132f6feed69..fff9aa5c0a9 100644 --- a/src/aosm/azext_aosm/util/management_clients.py +++ b/src/aosm/azext_aosm/util/management_clients.py @@ -4,27 +4,22 @@ # -------------------------------------------------------------------------------------------- """Clients for the python SDK along with useful caches.""" +from dataclasses import dataclass +from typing import Optional + +from azure.mgmt.containerregistry import ContainerRegistryManagementClient from azure.mgmt.resource import ResourceManagementClient from knack.log import get_logger from azext_aosm.vendored_sdks import HybridNetworkManagementClient -from azure.mgmt.containerregistry import ContainerRegistryManagementClient -from typing import Optional - logger = get_logger(__name__) +@dataclass class ApiClients: """A class for API Clients needed throughout.""" - def __init__( - self, - aosm_client: HybridNetworkManagementClient, - resource_client: ResourceManagementClient, - container_registry_client: Optional[ContainerRegistryManagementClient] = None, - ): - """Initialise with clients.""" - self.aosm_client = aosm_client - self.resource_client = resource_client - self.container_registry_client = container_registry_client + aosm_client: HybridNetworkManagementClient + resource_client: ResourceManagementClient + container_registry_client: Optional[ContainerRegistryManagementClient] = None diff --git 
a/src/aosm/setup.md b/src/aosm/setup.md index 8e73437afa1..b38bd52d039 100644 --- a/src/aosm/setup.md +++ b/src/aosm/setup.md @@ -44,6 +44,8 @@ azdev linter --include-whl-extensions aosm (Not written any tests yet) azdev test aosm ``` +The standard Python tool, `black`, is useful for automatically formatting your code. + You can use python-static-checks in your dev environment if you want, to help you: ```bash pip3 install -U --index-url https://pkgs.dev.azure.com/msazuredev/AzureForOperators/_packaging/python/pypi/simple/ python-static-checks==4.0.0 diff --git a/src/aosm/setup.py b/src/aosm/setup.py index 47149f957cb..807c81eb801 100644 --- a/src/aosm/setup.py +++ b/src/aosm/setup.py @@ -6,8 +6,7 @@ # -------------------------------------------------------------------------------------------- -from codecs import open -from setuptools import setup, find_packages +from setuptools import find_packages, setup try: from azure_bdist_wheel import cmdclass From 79956e758ea313e8df15f5621b6b3d939a504cea Mon Sep 17 00:00:00 2001 From: Andy Churchard Date: Tue, 4 Jul 2023 11:34:01 +0100 Subject: [PATCH 5/7] cnf-changes --- src/aosm/azext_aosm/_configuration.py | 14 +- src/aosm/azext_aosm/custom.py | 12 +- src/aosm/azext_aosm/deploy/artifact.py | 10 +- src/aosm/azext_aosm/deploy/deploy_with_arm.py | 2 - .../generate_nfd/cnf_nfd_generator.py | 230 ++++++++---------- .../generate_nfd/nfd_generator_base.py | 15 +- .../generate_nfd/vnf_nfd_generator.py | 23 +- src/aosm/azext_aosm/util/constants.py | 3 +- 8 files changed, 148 insertions(+), 161 deletions(-) diff --git a/src/aosm/azext_aosm/_configuration.py b/src/aosm/azext_aosm/_configuration.py index ce7b4e66710..a36cca1bba5 100644 --- a/src/aosm/azext_aosm/_configuration.py +++ b/src/aosm/azext_aosm/_configuration.py @@ -13,7 +13,7 @@ NSD, NSD_OUTPUT_BICEP_PREFIX, VNF, - SOURCE_ACR_REGEX + SOURCE_ACR_REGEX, ) DESCRIPTION_MAP: Dict[str, str] = { @@ -229,7 +229,7 @@ def network_function_name(self) -> str: @property def acr_manifest_name(self) -> str: """Return the ACR manifest name from the NFD name.""" - sanitised_nf_name = self.network_function_name.lower().replace('_', '-') + sanitised_nf_name = self.network_function_name.lower().replace("_", "-") return ( f"{sanitised_nf_name}-nsd-acr-manifest-{self.nsd_version.replace('.', '-')}" ) @@ -362,9 +362,9 @@ def __post_init__(self): self.helm_packages[package_index] = HelmPackageConfig(**dict(package)) @property - def build_output_folder_name(self) -> str: - """Return the local folder for generating the bicep template to.""" - return f"{NF_DEFINITION_OUTPUT_BICEP_PREFIX}{self.nf_name}" + def output_directory_for_build(self) -> Path: + """Return the directory the build command will writes its output to""" + return Path(f"{NF_DEFINITION_OUTPUT_BICEP_PREFIX}{self.nf_name}") def validate(self): """Validate the CNF config @@ -379,7 +379,9 @@ def validate(self): if not source_registry_match or len(source_registry_match.groups()) < 2: raise ValidationError( "CNF config has an invalid source registry ID. Please run `az aosm " - "nfd generate-config` to see the valid formats.") + "nfd generate-config` to see the valid formats." + ) + def get_configuration( configuration_type: str, config_as_dict: Optional[Dict[Any, Any]] = None diff --git a/src/aosm/azext_aosm/custom.py b/src/aosm/azext_aosm/custom.py index f20eddcfe05..e69d35e8780 100644 --- a/src/aosm/azext_aosm/custom.py +++ b/src/aosm/azext_aosm/custom.py @@ -117,15 +117,15 @@ def _generate_nfd( "Generate NFD called for unrecognised definition_type. 
Only VNF and CNF" " have been implemented." ) - if nfd_generator.vnfd_bicep_path: + if nfd_generator.nfd_bicep_path: carry_on = input( - f"The {nfd_generator.vnfd_bicep_path.parent} directory already exists -" + f"The {nfd_generator.nfd_bicep_path.parent} directory already exists -" " delete it and continue? (y/n)" ) if carry_on != "y": raise UnclassifiedUserFault("User aborted!") - shutil.rmtree(nfd_generator.vnfd_bicep_path.parent) + shutil.rmtree(nfd_generator.nfd_bicep_path.parent) nfd_generator.generate_nfd() @@ -178,7 +178,7 @@ def publish_definition( parameters_json_file=parameters_json_file, manifest_bicep_path=manifest_file, manifest_parameters_json_file=manifest_parameters_json_file, - skip=skip + skip=skip, ) elif definition_type == CNF: deployer = DeployerViaArm(api_clients, config=config) @@ -188,7 +188,7 @@ def publish_definition( parameters_json_file=parameters_json_file, manifest_bicep_path=manifest_file, manifest_parameters_json_file=manifest_parameters_json_file, - skip=skip + skip=skip, ) else: raise ValueError( @@ -370,7 +370,7 @@ def publish_design( parameters_json_file=parameters_json_file, manifest_bicep_path=manifest_file, manifest_parameters_json_file=manifest_parameters_json_file, - skip=skip + skip=skip, ) diff --git a/src/aosm/azext_aosm/deploy/artifact.py b/src/aosm/azext_aosm/deploy/artifact.py index 8d080933abd..6fc3846a9e1 100644 --- a/src/aosm/azext_aosm/deploy/artifact.py +++ b/src/aosm/azext_aosm/deploy/artifact.py @@ -5,16 +5,16 @@ """A module to handle interacting with artifacts.""" import subprocess from dataclasses import dataclass -from typing import Union, List +from typing import List, Union from azure.cli.core.commands import LongRunningOperation -from azure.mgmt.containerregistry.models import ImportImageParameters, ImportSource +from azure.mgmt.containerregistry import ContainerRegistryManagementClient +from azure.mgmt.containerregistry.models import (ImportImageParameters, + ImportSource) from azure.storage.blob import BlobClient, BlobType from knack.log import get_logger from knack.util import CLIError from oras.client import OrasClient -from azure.cli.core.commands import LongRunningOperation -from azure.mgmt.containerregistry import ContainerRegistryManagementClient from azext_aosm._configuration import ArtifactConfig, HelmPackageConfig @@ -156,7 +156,7 @@ def copy_image( :param source_image: source image :param target_registry_resource_group_name: target registry resource group name :param target_registry_name: target registry name - :param target_tags: the list of tags to be applied to the imported image + :param target_tags: the list of tags to be applied to the imported image should be of form: namepace/name:tag or name:tag :param mode: mode for import """ diff --git a/src/aosm/azext_aosm/deploy/deploy_with_arm.py b/src/aosm/azext_aosm/deploy/deploy_with_arm.py index f5a05e3fd48..8bbc2e28cb2 100644 --- a/src/aosm/azext_aosm/deploy/deploy_with_arm.py +++ b/src/aosm/azext_aosm/deploy/deploy_with_arm.py @@ -5,7 +5,6 @@ """Contains class for deploying generated definitions using ARM.""" import json import os -import re import shutil import subprocess # noqa import tempfile @@ -37,7 +36,6 @@ VNF, VNF_DEFINITION_BICEP_TEMPLATE_FILENAME, VNF_MANIFEST_BICEP_TEMPLATE_FILENAME, - SOURCE_ACR_REGEX, ) from azext_aosm.util.management_clients import ApiClients diff --git a/src/aosm/azext_aosm/generate_nfd/cnf_nfd_generator.py b/src/aosm/azext_aosm/generate_nfd/cnf_nfd_generator.py index 271b0fa2b9e..c25a53e42f2 100644 --- 
a/src/aosm/azext_aosm/generate_nfd/cnf_nfd_generator.py +++ b/src/aosm/azext_aosm/generate_nfd/cnf_nfd_generator.py @@ -4,11 +4,11 @@ # -------------------------------------------------------------------------------------- """Contains a class for generating CNF NFDs and associated resources.""" import json -import os import re import shutil import tarfile import tempfile +from pathlib import Path from typing import Any, Dict, Iterator, List, Optional, Tuple import yaml @@ -23,6 +23,7 @@ CNF_DEFINITION_JINJA2_SOURCE_TEMPLATE_FILENAME, CNF_MANIFEST_BICEP_TEMPLATE_FILENAME, CNF_MANIFEST_JINJA2_SOURCE_TEMPLATE_FILENAME, + CNF_VALUES_SCHEMA_FILENAME, CONFIG_MAPPINGS_DIR_NAME, DEPLOYMENT_PARAMETER_MAPPING_REGEX, DEPLOYMENT_PARAMETERS_FILENAME, @@ -59,38 +60,37 @@ def __init__(self, config: CNFConfiguration, interactive: bool = False): mapping file in config to be blank. """ self.config = config - self.nfd_jinja2_template_path = os.path.join( - os.path.dirname(__file__), - "templates", - CNF_DEFINITION_JINJA2_SOURCE_TEMPLATE_FILENAME, + self.nfd_jinja2_template_path = ( + Path(__file__).parent + / "templates" + / CNF_DEFINITION_JINJA2_SOURCE_TEMPLATE_FILENAME ) - self.manifest_jinja2_template_path = os.path.join( - os.path.dirname(__file__), - "templates", - CNF_MANIFEST_JINJA2_SOURCE_TEMPLATE_FILENAME, + self.manifest_jinja2_template_path = ( + Path(__file__).parent + / "templates" + / CNF_MANIFEST_JINJA2_SOURCE_TEMPLATE_FILENAME ) - self.output_folder_name = self.config.build_output_folder_name + self.output_directory: Path = self.config.output_directory_for_build + self._cnfd_bicep_path = ( + self.output_directory / CNF_DEFINITION_BICEP_TEMPLATE_FILENAME + ) + self._tmp_dir: Optional[Path] = None self.artifacts = [] self.nf_application_configurations = [] self.deployment_parameter_schema = SCHEMA_PREFIX - - self._bicep_path = os.path.join( - self.output_folder_name, CNF_DEFINITION_BICEP_TEMPLATE_FILENAME - ) self.interactive = interactive - self._tmp_folder_name = "" def generate_nfd(self) -> None: """Generate a CNF NFD which comprises a group, an Artifact Manifest and an NFDV.""" - # Create temporary folder. + # Create temporary directory. with tempfile.TemporaryDirectory() as tmpdirname: - self._tmp_folder_name = tmpdirname + self._tmp_dir = Path(tmpdirname) try: for helm_package in self.config.helm_packages: - # Unpack the chart into the tmp folder - self._extract_chart(helm_package.path_to_chart) + # Unpack the chart into the tmp directory + self._extract_chart(Path(helm_package.path_to_chart)) # TODO: Validate charts @@ -99,7 +99,7 @@ def generate_nfd(self) -> None: self._generate_chart_value_mappings(helm_package) # Get schema for each chart - # (extract mappings and take the schema bits we need from values.schema.json) + # (extract mappings and relevant parts of the schema) # + Add that schema to the big schema. self.deployment_parameter_schema["properties"].update( self.get_chart_mapping_schema(helm_package) @@ -137,9 +137,9 @@ def generate_nfd(self) -> None: self.write_nfd_bicep_file() self.write_schema_to_file() self.write_manifest_bicep_file() - self.copy_to_output_folder() + self.copy_to_output_directory() print( - f"Generated NFD bicep template created in {self.output_folder_name}" + f"Generated NFD bicep template created in {self.output_directory}" ) print( "Please review these templates." 
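
As a rough orientation for reviewers, the pathlib/tempfile pattern this commit moves to (including the `_extract_chart` rework in the next hunk) is sketched below in isolation. This is a minimal, self-contained illustration with a made-up chart filename, not code taken from the extension:

```python
import tarfile
import tempfile
from pathlib import Path


def extract_chart(chart_path: Path, destination: Path) -> None:
    """Unpack a .tgz/.gz or plain .tar Helm package into the destination directory."""
    if chart_path.suffix in (".gz", ".tgz"):
        mode = "r:gz"
    elif chart_path.suffix == ".tar":
        mode = "r:"
    else:
        raise ValueError(f"Unsupported chart archive type: {chart_path.suffix}")
    with tarfile.open(chart_path, mode) as tar:
        tar.extractall(path=destination)


if __name__ == "__main__":
    # Work in a throwaway directory, as generate_nfd does with TemporaryDirectory.
    with tempfile.TemporaryDirectory() as tmp_dir:
        # "my-chart-0.1.0.tgz" is a made-up example path.
        extract_chart(Path("my-chart-0.1.0.tgz"), Path(tmp_dir))
```
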
@@ -151,30 +151,29 @@ def generate_nfd(self) -> None: raise e @property - def bicep_path(self) -> Optional[str]: + def nfd_bicep_path(self) -> Optional[Path]: """Returns the path to the bicep file for the NFD if it has been created.""" - if os.path.exists(self._bicep_path): - return self._bicep_path - + if self._cnfd_bicep_path.exists(): + return self._cnfd_bicep_path return None - def _extract_chart(self, path: str) -> None: + def _extract_chart(self, path: Path) -> None: """ - Extract the chart into the tmp folder. + Extract the chart into the tmp directory. :param path: The path to helm package """ logger.debug("Extracting helm package %s", path) - (_, ext) = os.path.splitext(path) - if ext in (".gz", ".tgz"): + file_extension = path.suffix + if file_extension in (".gz", ".tgz"): with tarfile.open(path, "r:gz") as tar: - tar.extractall(path=self._tmp_folder_name) + tar.extractall(path=self._tmp_dir) - elif ext == ".tar": + elif file_extension == ".tar": with tarfile.open(path, "r:") as tar: - tar.extractall(path=self._tmp_folder_name) + tar.extractall(path=self._tmp_dir) else: raise InvalidTemplateError( @@ -202,12 +201,10 @@ def _generate_chart_value_mappings(self, helm_package: HelmPackageConfig) -> Non ) # Write the mapping to a file - folder_name = os.path.join(self._tmp_folder_name, GENERATED_VALUES_MAPPINGS_DIR_NAME) - os.makedirs(folder_name, exist_ok=True) - mapping_filepath = os.path.join( - self._tmp_folder_name, - GENERATED_VALUES_MAPPINGS_DIR_NAME, - f"{helm_package.name}-generated-mapping.yaml", + mapping_directory: Path = self._tmp_dir / GENERATED_VALUES_MAPPINGS_DIR_NAME + mapping_directory.mkdir(exist_ok=True) + mapping_filepath = ( + mapping_directory / f"{helm_package.name}-generated-mapping.yaml" ) with open(mapping_filepath, "w", encoding="UTF-8") as mapping_file: yaml.dump(mapping_to_write, mapping_file) @@ -227,13 +224,9 @@ def _read_top_level_values_yaml( :return: A dictionary of the yaml read from the file :rtype: Dict[str, Any] """ - for file in os.listdir(os.path.join(self._tmp_folder_name, helm_package.name)): - if file in ("values.yaml", "values.yml"): - with open( - os.path.join(self._tmp_folder_name, helm_package.name, file), - "r", - encoding="UTF-8", - ) as values_file: + for file in Path(self._tmp_dir / helm_package.name).iterdir(): + if file.name in ("values.yaml", "values.yml"): + with file.open(encoding="UTF-8") as values_file: values_yaml = yaml.safe_load(values_file) return values_yaml @@ -253,7 +246,7 @@ def write_manifest_bicep_file(self) -> None: artifacts=self.artifacts, ) - path = os.path.join(self._tmp_folder_name, CNF_MANIFEST_BICEP_TEMPLATE_FILENAME) + path = self._tmp_dir / CNF_MANIFEST_BICEP_TEMPLATE_FILENAME with open(path, "w", encoding="utf-8") as f: f.write(bicep_contents) @@ -268,11 +261,11 @@ def write_nfd_bicep_file(self) -> None: ) bicep_contents: str = template.render( - deployParametersPath=os.path.join(SCHEMAS_DIR_NAME, DEPLOYMENT_PARAMETERS_FILENAME), + deployParametersPath=Path(SCHEMAS_DIR_NAME, DEPLOYMENT_PARAMETERS_FILENAME), nf_application_configurations=self.nf_application_configurations, ) - path = os.path.join(self._tmp_folder_name, CNF_DEFINITION_BICEP_TEMPLATE_FILENAME) + path = self._tmp_dir / CNF_DEFINITION_BICEP_TEMPLATE_FILENAME with open(path, "w", encoding="utf-8") as f: f.write(bicep_contents) @@ -283,67 +276,52 @@ def write_schema_to_file(self) -> None: logger.debug("Create deploymentParameters.json") - full_schema = os.path.join(self._tmp_folder_name, DEPLOYMENT_PARAMETERS_FILENAME) + full_schema = 
self._tmp_dir / DEPLOYMENT_PARAMETERS_FILENAME with open(full_schema, "w", encoding="UTF-8") as f: json.dump(self.deployment_parameter_schema, f, indent=4) logger.debug("%s created", full_schema) - def copy_to_output_folder(self) -> None: - """Copy the config mappings, schema and bicep templates (artifact manifest and NFDV) to the output folder.""" - - logger.info("Create NFD bicep %s", self.output_folder_name) + def copy_to_output_directory(self) -> None: + """Copy the config mappings, schema and bicep templates (artifact manifest and NFDV) to the output directory.""" - os.mkdir(self.output_folder_name) - os.mkdir(os.path.join(self.output_folder_name, SCHEMAS_DIR_NAME)) + logger.info("Create NFD bicep %s", self.output_directory) - # Copy the nfd and the manifest bicep files to the output folder - tmp_nfd_bicep_path = os.path.join( - self._tmp_folder_name, CNF_DEFINITION_BICEP_TEMPLATE_FILENAME + Path(self.output_directory / SCHEMAS_DIR_NAME).mkdir( + parents=True, exist_ok=True ) - shutil.copy(tmp_nfd_bicep_path, self.output_folder_name) - tmp_manifest_bicep_path = os.path.join( - self._tmp_folder_name, CNF_MANIFEST_BICEP_TEMPLATE_FILENAME + # Copy the nfd and the manifest bicep files to the output directory + shutil.copy( + self._tmp_dir / CNF_DEFINITION_BICEP_TEMPLATE_FILENAME, + self.output_directory, + ) + shutil.copy( + self._tmp_dir / CNF_MANIFEST_BICEP_TEMPLATE_FILENAME, self.output_directory ) - shutil.copy(tmp_manifest_bicep_path, self.output_folder_name) - # Copy any generated values mappings YAML files to the corresponding folder in + # Copy any generated values mappings YAML files to the corresponding directory in # the output directory so that the user can edit them and re-run the build if # required - if os.path.exists( - os.path.join(self._tmp_folder_name, GENERATED_VALUES_MAPPINGS_DIR_NAME) - ): - generated_mappings_path = os.path.join( - self.output_folder_name, GENERATED_VALUES_MAPPINGS_DIR_NAME - ) + if Path(self._tmp_dir / GENERATED_VALUES_MAPPINGS_DIR_NAME).exists(): shutil.copytree( - os.path.join(self._tmp_folder_name, GENERATED_VALUES_MAPPINGS_DIR_NAME), - generated_mappings_path, + self._tmp_dir / GENERATED_VALUES_MAPPINGS_DIR_NAME, + self.output_directory / GENERATED_VALUES_MAPPINGS_DIR_NAME, ) # Copy the JSON config mappings and deploymentParameters schema that are used - # for the NFD to the output folder - tmp_config_mappings_path = os.path.join(self._tmp_folder_name, CONFIG_MAPPINGS_DIR_NAME) - output_config_mappings_path = os.path.join( - self.output_folder_name, CONFIG_MAPPINGS_DIR_NAME - ) + # for the NFD to the output directory shutil.copytree( - tmp_config_mappings_path, - output_config_mappings_path, + self._tmp_dir / CONFIG_MAPPINGS_DIR_NAME, + self.output_directory / CONFIG_MAPPINGS_DIR_NAME, dirs_exist_ok=True, ) - - tmp_schema_path = os.path.join(self._tmp_folder_name, DEPLOYMENT_PARAMETERS_FILENAME) - output_schema_path = os.path.join( - self.output_folder_name, SCHEMAS_DIR_NAME, DEPLOYMENT_PARAMETERS_FILENAME - ) shutil.copy( - tmp_schema_path, - output_schema_path, + self._tmp_dir / DEPLOYMENT_PARAMETERS_FILENAME, + self.output_directory / SCHEMAS_DIR_NAME / DEPLOYMENT_PARAMETERS_FILENAME, ) - logger.info("Copied files to %s", self.output_folder_name) + logger.info("Copied files to %s", self.output_directory) def generate_nf_application_config( self, @@ -368,16 +346,14 @@ def generate_nf_application_config( } @staticmethod - def _find_yaml_files(directory) -> Iterator[str]: + def _find_yaml_files(directory: Path) -> Iterator[str]: """ - Find all 
yaml files in given directory. + Find all yaml files recursively in given directory. :param directory: The directory to search. """ - for root, _, files in os.walk(directory): - for file in files: - if file.endswith(".yaml") or file.endswith(".yml"): - yield os.path.join(root, file) + yield from directory.glob("**/*.yaml") + yield from directory.glob("**/*.yml") def find_pattern_matches_in_chart( self, helm_package: HelmPackageConfig, start_string: str @@ -395,7 +371,7 @@ def find_pattern_matches_in_chart( paths and the name and version of the image. e.g. (Values.foo.bar.repoPath, foo, 1.2.3) """ - chart_dir = os.path.join(self._tmp_folder_name, helm_package.name) + chart_dir = self._tmp_dir / helm_package.name matches = [] path = [] @@ -413,14 +389,14 @@ def find_pattern_matches_in_chart( ) logger.debug( "Regex match for name and version is %s", - name_and_version - ) + name_and_version, + ) if name_and_version and len(name_and_version.groups()) == 2: logger.debug( "Found image name and version %s %s", name_and_version.group(1), - name_and_version.group(2) + name_and_version.group(2), ) matches.append( ( @@ -468,7 +444,7 @@ def get_chart_mapping_schema( ) -> Dict[Any, Any]: """ Get the schema for the non default values (those with {deploymentParameter...}). - Based on user provided values.schema.json. + Based on the user provided values schema. param helm_package: The helm package config. """ @@ -476,18 +452,16 @@ def get_chart_mapping_schema( logger.debug("Get chart mapping schema for %s", helm_package.name) mappings_path = helm_package.path_to_mappings - values_schema = os.path.join( - self._tmp_folder_name, helm_package.name, "values.schema.json" - ) - if not os.path.exists(mappings_path): + values_schema = self._tmp_dir / helm_package.name / CNF_VALUES_SCHEMA_FILENAME + if not Path(mappings_path).exists(): raise InvalidTemplateError( f"ERROR: The helm package '{helm_package.name}' does not have a valid values" " mappings file. The file at '{helm_package.path_to_mappings}' does not exist." "\nPlease fix this and run the command again." ) - if not os.path.exists(values_schema): + if not values_schema.exists(): raise InvalidTemplateError( - f"ERROR: The helm package '{helm_package.name}' is missing values.schema.json." + f"ERROR: The helm package '{helm_package.name}' is missing {CNF_VALUES_SCHEMA_FILENAME}." "\nPlease fix this and run the command again." ) @@ -515,9 +489,8 @@ def get_chart_mapping_schema( @staticmethod def traverse_dict( - dict_to_search: Dict[Any, Any], - target_regex: str - ) -> Dict[str, List[str]]: + dict_to_search: Dict[Any, Any], target_regex: str + ) -> Dict[str, List[str]]: """ Traverse the dictionary that is loaded from the file provided by path_to_mappings in the input.json. @@ -563,15 +536,14 @@ def traverse_dict( "at path %s, which this tool cannot parse. " "Please check the output configMappings and schemas " "files and check that they are as required.", - path + [k] + path + [k], ) return result @staticmethod def search_schema( - deployParams_paths: Dict[str, List[str]], - full_schema - ) -> Dict[str, Dict[str, str]]: + deployParams_paths: Dict[str, List[str]], full_schema + ) -> Dict[str, Dict[str, str]]: """ Search through provided schema for the types of the deployment parameters. This assumes that the type of the key will be the type of the deployment parameter. 
@@ -581,7 +553,7 @@ def search_schema( {"foo": {"type": "string"}, "bar": {"type": "string"}} param deployParams_paths: a dictionary of all the deploy parameters to search for, - with the key being the deploy parameter and the value being the + with the key being the deploy parameter and the value being the path to the value. e.g. {"foo": ["global", "foo", "bar"]} param full_schema: The schema to search through. @@ -591,13 +563,14 @@ def search_schema( for deploy_param, path_list in deployParams_paths.items(): logger.debug( "Searching for %s in schema at path %s", deploy_param, path_list - ) + ) node = full_schema for path in path_list: if "properties" in node.keys(): logger.debug( "Searching properties for %s in schema at path %s", - deploy_param, path + deploy_param, + path, ) node = node["properties"][path] else: @@ -613,7 +586,8 @@ def search_schema( logger.warning( "We default these parameters to type string. " "Please edit schemas/%s in the output before publishing " - "if this is wrong", DEPLOYMENT_PARAMETERS + "if this is wrong", + DEPLOYMENT_PARAMETERS_FILENAME, ) return new_schema @@ -697,15 +671,15 @@ def get_chart_name_and_version( self, helm_package: HelmPackageConfig ) -> Tuple[str, str]: """Get the name and version of the chart.""" - chart = os.path.join(self._tmp_folder_name, helm_package.name, "Chart.yaml") + chart_path = self._tmp_dir / helm_package.name / "Chart.yaml" - if not os.path.exists(chart): + if not chart_path.exists(): raise InvalidTemplateError( f"There is no Chart.yaml file in the helm package '{helm_package.name}'. " "\nPlease fix this and run the command again." ) - with open(chart, "r", encoding="utf-8") as f: + with open(chart_path, "r", encoding="utf-8") as f: data = yaml.load(f, Loader=yaml.FullLoader) if "name" in data and "version" in data: chart_name = data["name"] @@ -719,23 +693,19 @@ def get_chart_name_and_version( return (chart_name, chart_version) - def jsonify_value_mappings(self, helm_package: HelmPackageConfig) -> str: + def jsonify_value_mappings(self, helm_package: HelmPackageConfig) -> Path: """Yaml->JSON values mapping file, then return path to it.""" - mappings_yaml = helm_package.path_to_mappings - - mappings_folder_path = os.path.join(self._tmp_folder_name, CONFIG_MAPPINGS_DIR_NAME) - mappings_filename = f"{helm_package.name}-mappings.json" - - if not os.path.exists(mappings_folder_path): - os.mkdir(mappings_folder_path) + mappings_yaml_file = helm_package.path_to_mappings + mappings_dir = self._tmp_dir / CONFIG_MAPPINGS_DIR_NAME + mappings_output_file = mappings_dir / f"{helm_package.name}-mappings.json" - mapping_file_path = os.path.join(mappings_folder_path, mappings_filename) + mappings_dir.mkdir(exist_ok=True) - with open(mappings_yaml, "r", encoding="utf-8") as f: + with open(mappings_yaml_file, "r", encoding="utf-8") as f: data = yaml.load(f, Loader=yaml.FullLoader) - with open(mapping_file_path, "w", encoding="utf-8") as file: + with open(mappings_output_file, "w", encoding="utf-8") as file: json.dump(data, file, indent=4) logger.debug("Generated parameter mappings for %s", helm_package.name) - return os.path.join(CONFIG_MAPPINGS_DIR_NAME, mappings_filename) + return Path(CONFIG_MAPPINGS_DIR_NAME, f"{helm_package.name}-mappings.json") diff --git a/src/aosm/azext_aosm/generate_nfd/nfd_generator_base.py b/src/aosm/azext_aosm/generate_nfd/nfd_generator_base.py index a57604f3009..e155950c03d 100644 --- a/src/aosm/azext_aosm/generate_nfd/nfd_generator_base.py +++ b/src/aosm/azext_aosm/generate_nfd/nfd_generator_base.py @@ -3,7 +3,10 
@@ # License. See License.txt in the project root for license information. # -------------------------------------------------------------------------------------- """Contains a base class for generating NFDs.""" -from abc import ABC +from abc import ABC, abstractmethod +from pathlib import Path +from typing import Optional + from knack.log import get_logger logger = get_logger(__name__) @@ -12,7 +15,11 @@ class NFDGenerator(ABC): """A class for generating an NFD from a config file.""" - # pylint: disable=too-few-public-methods + @abstractmethod def generate_nfd(self) -> None: - """No-op on base class.""" - raise NotImplementedError + ... + + @property + @abstractmethod + def nfd_bicep_path(self) -> Optional[Path]: + ... diff --git a/src/aosm/azext_aosm/generate_nfd/vnf_nfd_generator.py b/src/aosm/azext_aosm/generate_nfd/vnf_nfd_generator.py index ddea2a21b19..98dd6a587e0 100644 --- a/src/aosm/azext_aosm/generate_nfd/vnf_nfd_generator.py +++ b/src/aosm/azext_aosm/generate_nfd/vnf_nfd_generator.py @@ -5,7 +5,6 @@ """Contains a class for generating VNF NFDs and associated resources.""" import json -import os import shutil import tempfile from functools import cached_property @@ -67,8 +66,12 @@ def __init__(self, config: VNFConfiguration, order_params: bool, interactive: bo self.arm_template_path = Path(self.config.arm_template.file_path) self.output_directory: Path = self.config.output_directory_for_build - self._vnfd_bicep_path = Path(self.output_directory, VNF_DEFINITION_BICEP_TEMPLATE_FILENAME) - self._manifest_bicep_path = Path(self.output_directory, VNF_MANIFEST_BICEP_TEMPLATE_FILENAME) + self._vnfd_bicep_path = Path( + self.output_directory, VNF_DEFINITION_BICEP_TEMPLATE_FILENAME + ) + self._manifest_bicep_path = Path( + self.output_directory, VNF_MANIFEST_BICEP_TEMPLATE_FILENAME + ) self.order_params = order_params self.interactive = interactive self._tmp_dir: Optional[Path] = None @@ -93,7 +96,7 @@ def generate_nfd(self) -> None: ) @property - def vnfd_bicep_path(self) -> Optional[Path]: + def nfd_bicep_path(self) -> Optional[Path]: """Returns the path to the bicep file for the NFD if it has been created.""" if self._vnfd_bicep_path.exists(): return self._vnfd_bicep_path @@ -225,7 +228,9 @@ def write_deployment_parameters(self, directory: Path) -> None: # Extra output file to help the user know which parameters are optional if not self.interactive: if nfd_parameters_with_default: - optional_deployment_parameters_path = directory / OPTIONAL_DEPLOYMENT_PARAMETERS_FILENAME + optional_deployment_parameters_path = ( + directory / OPTIONAL_DEPLOYMENT_PARAMETERS_FILENAME + ) with open( optional_deployment_parameters_path, "w", encoding="utf-8" ) as _file: @@ -297,10 +302,14 @@ def _copy_to_output_directory(self) -> None: static_bicep_templates_dir = Path(__file__).parent / "templates" - static_vnfd_bicep_path = static_bicep_templates_dir / VNF_DEFINITION_BICEP_TEMPLATE_FILENAME + static_vnfd_bicep_path = ( + static_bicep_templates_dir / VNF_DEFINITION_BICEP_TEMPLATE_FILENAME + ) shutil.copy(static_vnfd_bicep_path, self.output_directory) - static_manifest_bicep_path = static_bicep_templates_dir / VNF_MANIFEST_BICEP_TEMPLATE_FILENAME + static_manifest_bicep_path = ( + static_bicep_templates_dir / VNF_MANIFEST_BICEP_TEMPLATE_FILENAME + ) shutil.copy(static_manifest_bicep_path, self.output_directory) # Copy everything in the temp directory to the output directory shutil.copytree( diff --git a/src/aosm/azext_aosm/util/constants.py b/src/aosm/azext_aosm/util/constants.py index 
072940815b2..2365a3b88bf 100644 --- a/src/aosm/azext_aosm/util/constants.py +++ b/src/aosm/azext_aosm/util/constants.py @@ -35,6 +35,7 @@ CNF_MANIFEST_JINJA2_SOURCE_TEMPLATE_FILENAME = "cnfartifactmanifest.bicep.j2" CNF_DEFINITION_BICEP_TEMPLATE_FILENAME = "cnfdefinition.bicep" CNF_MANIFEST_BICEP_TEMPLATE_FILENAME = "cnfartifactmanifest.bicep" +CNF_VALUES_SCHEMA_FILENAME = "values.schema.json" # Names of directories used in the repo @@ -84,4 +85,4 @@ SOURCE_ACR_REGEX = ( r".*\/resourceGroups\/([^\/]*)\/providers\/Microsoft." r"ContainerRegistry\/registries\/([^\/]*)" - ) +) From 678a196f421df43b7aad344bdec22e9c8bb3e1cd Mon Sep 17 00:00:00 2001 From: Andy Churchard Date: Tue, 4 Jul 2023 12:20:57 +0100 Subject: [PATCH 6/7] Rename methods as private if they rely on other private methods / properties --- .../generate_nfd/cnf_nfd_generator.py | 60 +++++++++---------- 1 file changed, 29 insertions(+), 31 deletions(-) diff --git a/src/aosm/azext_aosm/generate_nfd/cnf_nfd_generator.py b/src/aosm/azext_aosm/generate_nfd/cnf_nfd_generator.py index c25a53e42f2..7dc5c8ccc8f 100644 --- a/src/aosm/azext_aosm/generate_nfd/cnf_nfd_generator.py +++ b/src/aosm/azext_aosm/generate_nfd/cnf_nfd_generator.py @@ -102,12 +102,12 @@ def generate_nfd(self) -> None: # (extract mappings and relevant parts of the schema) # + Add that schema to the big schema. self.deployment_parameter_schema["properties"].update( - self.get_chart_mapping_schema(helm_package) + self._get_chart_mapping_schema(helm_package) ) # Get all image line matches for files in the chart. # Do this here so we don't have to do it multiple times. - image_line_matches = self.find_pattern_matches_in_chart( + image_line_matches = self._find_pattern_matches_in_chart( helm_package, IMAGE_START_STRING ) # Creates a flattened list of image registry paths to prevent set error @@ -118,26 +118,26 @@ def generate_nfd(self) -> None: # Generate the NF application configuration for the chart # passed to jinja2 renderer to render bicep template self.nf_application_configurations.append( - self.generate_nf_application_config( + self._generate_nf_application_config( helm_package, image_registry_paths, - self.find_pattern_matches_in_chart( + self._find_pattern_matches_in_chart( helm_package, IMAGE_PULL_SECRETS_START_STRING ), ) ) # Workout the list of artifacts for the chart and # update the list for the NFD with any unique artifacts. - chart_artifacts = self.get_artifact_list( + chart_artifacts = self._get_artifact_list( helm_package, image_line_matches ) self.artifacts += [ a for a in chart_artifacts if a not in self.artifacts ] - self.write_nfd_bicep_file() - self.write_schema_to_file() - self.write_manifest_bicep_file() - self.copy_to_output_directory() + self._write_nfd_bicep_file() + self._write_schema_to_file() + self._write_manifest_bicep_file() + self._copy_to_output_directory() print( f"Generated NFD bicep template created in {self.output_directory}" ) @@ -234,8 +234,8 @@ def _read_top_level_values_yaml( "Cannot find top level values.yaml/.yml file in Helm package." 
) - def write_manifest_bicep_file(self) -> None: - """Write the bicep file for the Artifact Manifest.""" + def _write_manifest_bicep_file(self) -> None: + """Write the bicep file for the Artifact Manifest to the temp directory.""" with open(self.manifest_jinja2_template_path, "r", encoding="UTF-8") as f: template: Template = Template( f.read(), @@ -252,8 +252,8 @@ def write_manifest_bicep_file(self) -> None: logger.info("Created artifact manifest bicep template: %s", path) - def write_nfd_bicep_file(self) -> None: - """Write the bicep file for the NFD.""" + def _write_nfd_bicep_file(self) -> None: + """Write the bicep file for the NFD to the temp directory.""" with open(self.nfd_jinja2_template_path, "r", encoding="UTF-8") as f: template: Template = Template( f.read(), @@ -271,8 +271,8 @@ def write_nfd_bicep_file(self) -> None: logger.info("Created NFD bicep template: %s", path) - def write_schema_to_file(self) -> None: - """Write the schema to file deploymentParameters.json.""" + def _write_schema_to_file(self) -> None: + """Write the schema to file deploymentParameters.json to the temp directory.""" logger.debug("Create deploymentParameters.json") @@ -282,8 +282,8 @@ def write_schema_to_file(self) -> None: logger.debug("%s created", full_schema) - def copy_to_output_directory(self) -> None: - """Copy the config mappings, schema and bicep templates (artifact manifest and NFDV) to the output directory.""" + def _copy_to_output_directory(self) -> None: + """Copy the config mappings, schema and bicep templates (artifact manifest and NFDV) from the temp directory to the output directory.""" logger.info("Create NFD bicep %s", self.output_directory) @@ -323,14 +323,14 @@ def copy_to_output_directory(self) -> None: logger.info("Copied files to %s", self.output_directory) - def generate_nf_application_config( + def _generate_nf_application_config( self, helm_package: HelmPackageConfig, image_registry_path: List[str], image_pull_secret_line_matches: List[Tuple[str, ...]], ) -> Dict[str, Any]: """Generate NF application config.""" - (name, version) = self.get_chart_name_and_version(helm_package) + (name, version) = self._get_chart_name_and_version(helm_package) registry_values_paths = set(image_registry_path) image_pull_secrets_values_paths = set(image_pull_secret_line_matches) @@ -342,7 +342,7 @@ def generate_nf_application_config( "dependsOnProfile": helm_package.depends_on, "registryValuesPaths": list(registry_values_paths), "imagePullSecretsValuesPaths": list(image_pull_secrets_values_paths), - "valueMappingsPath": self.jsonify_value_mappings(helm_package), + "valueMappingsPath": self._jsonify_value_mappings(helm_package), } @staticmethod @@ -355,7 +355,7 @@ def _find_yaml_files(directory: Path) -> Iterator[str]: yield from directory.glob("**/*.yaml") yield from directory.glob("**/*.yml") - def find_pattern_matches_in_chart( + def _find_pattern_matches_in_chart( self, helm_package: HelmPackageConfig, start_string: str ) -> List[Tuple[str, ...]]: """ @@ -411,7 +411,7 @@ def find_pattern_matches_in_chart( matches += path return matches - def get_artifact_list( + def _get_artifact_list( self, helm_package: HelmPackageConfig, image_line_matches: List[Tuple[str, ...]], @@ -423,7 +423,7 @@ def get_artifact_list( :param image_line_matches: The list of image line matches. 
""" artifact_list = [] - (chart_name, chart_version) = self.get_chart_name_and_version(helm_package) + (chart_name, chart_version) = self._get_chart_name_and_version(helm_package) helm_artifact = { "name": chart_name, "version": chart_version, @@ -439,7 +439,7 @@ def get_artifact_list( return artifact_list - def get_chart_mapping_schema( + def _get_chart_mapping_schema( self, helm_package: HelmPackageConfig ) -> Dict[Any, Any]: """ @@ -492,10 +492,8 @@ def traverse_dict( dict_to_search: Dict[Any, Any], target_regex: str ) -> Dict[str, List[str]]: """ - Traverse the dictionary that is loaded from the file provided by path_to_mappings in the input.json. - - Returns a dictionary of all the values that match the target regex, - with the key being the deploy parameter and the value being the path to the value. + Traverse the dictionary provided and return a dictionary of all the values that match the target regex, + with the key being the deploy parameter and the value being the path (as a list) to the value. e.g. {"foo": ["global", "foo", "bar"]} :param d: The dictionary to traverse. @@ -545,7 +543,7 @@ def search_schema( deployParams_paths: Dict[str, List[str]], full_schema ) -> Dict[str, Dict[str, str]]: """ - Search through provided schema for the types of the deployment parameters. + Search through the provided schema for the types of the deployment parameters. This assumes that the type of the key will be the type of the deployment parameter. e.g. if foo: {deployParameter.bar} and foo is type string, then bar is type string. @@ -667,7 +665,7 @@ def _replace_values_with_deploy_params( return final_values_mapping_dict - def get_chart_name_and_version( + def _get_chart_name_and_version( self, helm_package: HelmPackageConfig ) -> Tuple[str, str]: """Get the name and version of the chart.""" @@ -693,7 +691,7 @@ def get_chart_name_and_version( return (chart_name, chart_version) - def jsonify_value_mappings(self, helm_package: HelmPackageConfig) -> Path: + def _jsonify_value_mappings(self, helm_package: HelmPackageConfig) -> Path: """Yaml->JSON values mapping file, then return path to it.""" mappings_yaml_file = helm_package.path_to_mappings mappings_dir = self._tmp_dir / CONFIG_MAPPINGS_DIR_NAME From 1e4de53df225501f910e05431942b71bdc020d01 Mon Sep 17 00:00:00 2001 From: Andy Churchard Date: Tue, 4 Jul 2023 12:55:32 +0100 Subject: [PATCH 7/7] Fix dodgy merge --- src/aosm/azext_aosm/deploy/artifact.py | 6 ------ 1 file changed, 6 deletions(-) diff --git a/src/aosm/azext_aosm/deploy/artifact.py b/src/aosm/azext_aosm/deploy/artifact.py index c3288008199..614b38d5784 100644 --- a/src/aosm/azext_aosm/deploy/artifact.py +++ b/src/aosm/azext_aosm/deploy/artifact.py @@ -99,12 +99,6 @@ def _upload_helm_to_acr(self, artifact_config: HelmPackageConfig) -> None: logout_command = ["docker", "logout", registry] subprocess.run(logout_command, check=True) - # If we don't logout from the registry, future Artifact uploads to this ACR - # will fail with an UNAUTHORIZED error. There is no az acr logout command, but - # it is a wrapper around docker, so a call to docker logout will work. - logout_command = ["docker", "logout", registry] - subprocess.run(logout_command, check=True) - def _upload_to_storage_account(self, artifact_config: ArtifactConfig) -> None: """ Upload artifact to storage account.