diff --git a/config/config_mgmt.py b/config/config_mgmt.py index 9b2021bef0..4e34a7ae00 100644 --- a/config/config_mgmt.py +++ b/config/config_mgmt.py @@ -2,8 +2,11 @@ config_mgmt.py provides classes for configuration validation and for Dynamic Port Breakout. ''' + +import os import re import syslog +import yang as ly from json import load from sys import flags from time import sleep as tsleep @@ -46,27 +49,14 @@ def __init__(self, source="configDB", debug=False, allowTablesWithoutYang=True): try: self.configdbJsonIn = None self.configdbJsonOut = None + self.source = source self.allowTablesWithoutYang = allowTablesWithoutYang # logging vars self.SYSLOG_IDENTIFIER = "ConfigMgmt" self.DEBUG = debug - self.sy = sonic_yang.SonicYang(YANG_DIR, debug=debug) - # load yang models - self.sy.loadYangModel() - # load jIn from config DB or from config DB json file. - if source.lower() == 'configdb': - self.readConfigDB() - # treat any other source as file input - else: - self.readConfigDBJson(source) - # this will crop config, xlate and load. - self.sy.loadData(self.configdbJsonIn) - - # Raise if tables without YANG models are not allowed but exist. - if not allowTablesWithoutYang and len(self.sy.tablesWithOutYang): - raise Exception('Config has tables without YANG models') + self.__init_sonic_yang() except Exception as e: self.sysLog(doPrint=True, logLevel=syslog.LOG_ERR, msg=str(e)) @@ -74,6 +64,23 @@ def __init__(self, source="configDB", debug=False, allowTablesWithoutYang=True): return + def __init_sonic_yang(self): + self.sy = sonic_yang.SonicYang(YANG_DIR, debug=self.DEBUG) + # load yang models + self.sy.loadYangModel() + # load jIn from config DB or from config DB json file. + if self.source.lower() == 'configdb': + self.readConfigDB() + # treat any other source as file input + else: + self.readConfigDBJson(self.source) + # this will crop config, xlate and load. + self.sy.loadData(self.configdbJsonIn) + + # Raise if tables without YANG models are not allowed but exist. + if not self.allowTablesWithoutYang and len(self.sy.tablesWithOutYang): + raise Exception('Config has tables without YANG models') + def __del__(self): pass @@ -213,6 +220,70 @@ def writeConfigDB(self, jDiff): return + def add_module(self, yang_module_str, replace_if_exists=False): + """ + Validate and add new YANG module to the system. + + Parameters: + yang_module_str (str): YANG module in string representation. + + Returns: + None + """ + + module_name = self.get_module_name(yang_module_str) + module_path = os.path.join(YANG_DIR, '{}.yang'.format(module_name)) + if os.path.exists(module_path) and not replace_if_exists: + raise Exception('{} already exists'.format(module_name)) + with open(module_path, 'w') as module_file: + module_file.write(yang_module_str) + try: + self.__init_sonic_yang() + except Exception: + os.remove(module_path) + raise + + def remove_module(self, module_name): + """ + Remove YANG module on the system and validate. + + Parameters: + module_name (str): YANG module name. + + Returns: + None + """ + + module_path = os.path.join(YANG_DIR, '{}.yang'.format(module_name)) + if not os.path.exists(module_path): + return + with open(module_path, 'r') as module_file: + yang_module_str = module_file.read() + try: + os.remove(module_path) + self.__init_sonic_yang() + except Exception: + self.add_module(yang_module_str) + raise + + @staticmethod + def get_module_name(yang_module_str): + """ + Read yangs module name from yang_module_str + + Parameters: + yang_module_str(str): YANG module string. 
+ + Returns: + str: Module name + """ + + # Instantiate new context since parse_module_mem() loads the module into context. + sy = sonic_yang.SonicYang(YANG_DIR) + module = sy.ctx.parse_module_mem(yang_module_str, ly.LYS_IN_YANG) + return module.name() + + # End of Class ConfigMgmt class ConfigMgmtDPB(ConfigMgmt): @@ -417,8 +488,8 @@ def _deletePorts(self, ports=list(), force=False): deps.extend(dep) # No further action with no force and deps exist - if force == False and deps: - return configToLoad, deps, False; + if not force and deps: + return configToLoad, deps, False # delets all deps, No topological sort is needed as of now, if deletion # of deps fails, return immediately @@ -436,8 +507,8 @@ def _deletePorts(self, ports=list(), force=False): self.sy.deleteNode(str(xPathPort)) # Let`s Validate the tree now - if self.validateConfigData()==False: - return configToLoad, deps, False; + if not self.validateConfigData(): + return configToLoad, deps, False # All great if we are here, Lets get the diff self.configdbJsonOut = self.sy.getData() diff --git a/sonic_package_manager/constraint.py b/sonic_package_manager/constraint.py index af5a13000b..70b7165354 100644 --- a/sonic_package_manager/constraint.py +++ b/sonic_package_manager/constraint.py @@ -46,7 +46,7 @@ def parse(constraints: Dict) -> 'ComponentConstraints': """ components = {component: VersionConstraint.parse(version) - for component, version in constraints.items()} + for component, version in constraints.items()} return ComponentConstraints(components) def deparse(self) -> Dict[str, str]: diff --git a/sonic_package_manager/dockerapi.py b/sonic_package_manager/dockerapi.py index 926600d0bc..7f051d2d72 100644 --- a/sonic_package_manager/dockerapi.py +++ b/sonic_package_manager/dockerapi.py @@ -186,6 +186,15 @@ def rm(self, container: str, **kwargs): self.client.containers.get(container).remove(**kwargs) log.debug(f'removed container {container}') + def rm_by_ancestor(self, image_id: str, **kwargs): + """ Docker 'rm' command for running containers instantiated + from image passed to this function. """ + + # Clean containers based on the old image + containers = self.ps(filters={'ancestor': image_id}, all=True) + for container in containers: + self.rm(container.id, **kwargs) + def ps(self, **kwargs): """ Docker 'ps' command. 
""" diff --git a/sonic_package_manager/errors.py b/sonic_package_manager/errors.py index 17279c52c4..fe4de39a39 100644 --- a/sonic_package_manager/errors.py +++ b/sonic_package_manager/errors.py @@ -143,4 +143,3 @@ class PackageComponentConflictError(PackageInstallationError): def __str__(self): return (f'Package {self.name} conflicts with {self.component} {self.constraint} ' f'in package {self.dependency} but version {self.installed_ver} is installed') - diff --git a/sonic_package_manager/main.py b/sonic_package_manager/main.py index c0589ae5b5..8a0aabb901 100644 --- a/sonic_package_manager/main.py +++ b/sonic_package_manager/main.py @@ -361,7 +361,7 @@ def install(ctx, package_source = package_expr or from_repository or from_tarball if not package_source: - exit_cli(f'Package source is not specified', fg='red') + exit_cli('Package source is not specified', fg='red') if not yes and not force: click.confirm(f'{package_source} is going to be installed, ' @@ -386,7 +386,7 @@ def install(ctx, except Exception as err: exit_cli(f'Failed to install {package_source}: {err}', fg='red') except KeyboardInterrupt: - exit_cli(f'Operation canceled by user', fg='red') + exit_cli('Operation canceled by user', fg='red') @cli.command() @@ -409,15 +409,16 @@ def reset(ctx, name, force, yes, skip_host_plugins): except Exception as err: exit_cli(f'Failed to reset package {name}: {err}', fg='red') except KeyboardInterrupt: - exit_cli(f'Operation canceled by user', fg='red') + exit_cli('Operation canceled by user', fg='red') @cli.command() @add_options(PACKAGE_COMMON_OPERATION_OPTIONS) +@click.option('--keep-config', is_flag=True, help='Keep features configuration in CONFIG DB.') @click.argument('name') @click.pass_context @root_privileges_required -def uninstall(ctx, name, force, yes): +def uninstall(ctx, name, force, yes, keep_config): """ Uninstall package. 
""" manager: PackageManager = ctx.obj @@ -426,12 +427,17 @@ def uninstall(ctx, name, force, yes): click.confirm(f'Package {name} is going to be uninstalled, ' f'continue?', abort=True, show_default=True) + uninstall_opts = { + 'force': force, + 'keep_config': keep_config, + } + try: - manager.uninstall(name, force) + manager.uninstall(name, **uninstall_opts) except Exception as err: exit_cli(f'Failed to uninstall package {name}: {err}', fg='red') except KeyboardInterrupt: - exit_cli(f'Operation canceled by user', fg='red') + exit_cli('Operation canceled by user', fg='red') @cli.command() @@ -453,7 +459,7 @@ def migrate(ctx, database, force, yes, dockerd_socket): except Exception as err: exit_cli(f'Failed to migrate packages {err}', fg='red') except KeyboardInterrupt: - exit_cli(f'Operation canceled by user', fg='red') + exit_cli('Operation canceled by user', fg='red') if __name__ == "__main__": diff --git a/sonic_package_manager/manager.py b/sonic_package_manager/manager.py index 3caf90d95f..836a992f0a 100644 --- a/sonic_package_manager/manager.py +++ b/sonic_package_manager/manager.py @@ -10,8 +10,11 @@ import docker import filelock +from config import config_mgmt from sonic_py_common import device_info +from sonic_cli_gen.generator import CliGenerator + from sonic_package_manager import utils from sonic_package_manager.constraint import ( VersionConstraint, @@ -39,12 +42,16 @@ from sonic_package_manager.progress import ProgressManager from sonic_package_manager.reference import PackageReference from sonic_package_manager.registry import RegistryResolver +from sonic_package_manager.service_creator import SONIC_CLI_COMMANDS from sonic_package_manager.service_creator.creator import ( ServiceCreator, run_command ) from sonic_package_manager.service_creator.feature import FeatureRegistry -from sonic_package_manager.service_creator.sonic_db import SonicDB +from sonic_package_manager.service_creator.sonic_db import ( + INIT_CFG_JSON, + SonicDB +) from sonic_package_manager.service_creator.utils import in_chroot from sonic_package_manager.source import ( PackageSource, @@ -52,7 +59,6 @@ RegistrySource, TarballSource ) -from sonic_package_manager.utils import DockerReference from sonic_package_manager.version import ( Version, VersionRange, @@ -102,7 +108,7 @@ def wrapped_function(*args, **kwargs): return wrapped_function -def rollback(func, *args, **kwargs): +def rollback(func, *args, **kwargs) -> Callable: """ Used in rollback callbacks to ignore failure but proceed with rollback. Error will be printed but not fail the whole procedure of rollback. """ @@ -131,7 +137,7 @@ def package_constraint_to_reference(constraint: PackageConstraint) -> PackageRef return PackageReference(package_name, version_to_tag(version_constraint)) -def parse_reference_expression(expression): +def parse_reference_expression(expression) -> PackageReference: try: return package_constraint_to_reference(PackageConstraint.parse(expression)) except ValueError: @@ -140,6 +146,36 @@ def parse_reference_expression(expression): return PackageReference.parse(expression) +def get_cli_plugin_directory(command: str) -> str: + """ Returns a plugins package directory for command group. + + Args: + command: SONiC command: "show"/"config"/"clear". + Returns: + Path to plugins package directory. 
+ """ + + pkg_loader = pkgutil.get_loader(f'{command}.plugins') + if pkg_loader is None: + raise PackageManagerError(f'Failed to get plugins path for {command} CLI') + plugins_pkg_path = os.path.dirname(pkg_loader.path) + return plugins_pkg_path + + +def get_cli_plugin_path(package: Package, command: str) -> str: + """ Returns a path where to put CLI plugin code. + + Args: + package: Package to generate this path for. + command: SONiC command: "show"/"config"/"clear". + Returns: + Path generated for this package. + """ + + plugin_module_file = package.name + '.py' + return os.path.join(get_cli_plugin_directory(command), plugin_module_file) + + def validate_package_base_os_constraints(package: Package, sonic_version_info: Dict[str, str]): """ Verify that all dependencies on base OS components are met. Args: @@ -217,11 +253,10 @@ def validate_package_tree(packages: Dict[str, Package]): continue component_version = conflicting_package.components[component] - log.debug(f'conflicting package {dependency.name}: ' + log.debug(f'conflicting package {conflict.name}: ' f'component {component} version is {component_version}') - if constraint.allows_all(component_version): - raise PackageComponentConflictError(package.name, dependency, component, + raise PackageComponentConflictError(package.name, conflict, component, constraint, component_version) @@ -367,12 +402,17 @@ def install_from_source(self, if not self.database.has_package(package.name): self.database.add_package(package.name, package.repository) + service_create_opts = { + 'state': feature_state, + 'owner': default_owner, + } + try: with contextlib.ExitStack() as exits: source.install(package) exits.callback(rollback(source.uninstall, package)) - self.service_creator.create(package, state=feature_state, owner=default_owner) + self.service_creator.create(package, **service_create_opts) exits.callback(rollback(self.service_creator.remove, package)) self.service_creator.generate_shutdown_sequence_files( @@ -400,13 +440,16 @@ def install_from_source(self, @under_lock @opt_check - def uninstall(self, name: str, force=False): + def uninstall(self, name: str, + force: bool = False, + keep_config: bool = False): """ Uninstall SONiC Package referenced by name. The uninstallation can be forced if force argument is True. Args: name: SONiC Package name. force: Force the installation. + keep_config: Keep feature configuration in databases. 
Raises: PackageManagerError """ @@ -436,17 +479,11 @@ def uninstall(self, name: str, force=False): try: self._uninstall_cli_plugins(package) - self.service_creator.remove(package) + self.service_creator.remove(package, keep_config=keep_config) self.service_creator.generate_shutdown_sequence_files( self._get_installed_packages_except(package) ) - - # Clean containers based on this image - containers = self.docker.ps(filters={'ancestor': package.image_id}, - all=True) - for container in containers: - self.docker.rm(container.id, force=True) - + self.docker.rm_by_ancestor(package.image_id, force=True) self.docker.rmi(package.image_id, force=True) package.entry.image_id = None except Exception as err: @@ -494,7 +531,6 @@ def upgrade_from_source(self, ) old_feature = old_package.manifest['service']['name'] - new_feature = new_package.manifest['service']['name'] old_version = old_package.manifest['package']['version'] new_version = new_package.manifest['package']['version'] @@ -522,6 +558,13 @@ def upgrade_from_source(self, # After all checks are passed we proceed to actual upgrade + service_create_opts = { + 'register_feature': False, + } + service_remove_opts = { + 'deregister_feature': False, + } + try: with contextlib.ExitStack() as exits: self._uninstall_cli_plugins(old_package) @@ -530,24 +573,25 @@ def upgrade_from_source(self, source.install(new_package) exits.callback(rollback(source.uninstall, new_package)) - if self.feature_registry.is_feature_enabled(old_feature): + feature_enabled = self.feature_registry.is_feature_enabled(old_feature) + + if feature_enabled: + self._systemctl_action(new_package, 'disable') + exits.callback(rollback(self._systemctl_action, + old_package, 'enable')) self._systemctl_action(old_package, 'stop') exits.callback(rollback(self._systemctl_action, old_package, 'start')) - self.service_creator.remove(old_package, deregister_feature=False) + self.service_creator.remove(old_package, **service_remove_opts) exits.callback(rollback(self.service_creator.create, old_package, - register_feature=False)) + **service_create_opts)) - # Clean containers based on the old image - containers = self.docker.ps(filters={'ancestor': old_package.image_id}, - all=True) - for container in containers: - self.docker.rm(container.id, force=True) + self.docker.rm_by_ancestor(old_package.image_id, force=True) - self.service_creator.create(new_package, register_feature=False) + self.service_creator.create(new_package, **service_create_opts) exits.callback(rollback(self.service_creator.remove, new_package, - register_feature=False)) + **service_remove_opts)) self.service_creator.generate_shutdown_sequence_files( self._get_installed_packages_and(new_package) @@ -557,11 +601,23 @@ def upgrade_from_source(self, self._get_installed_packages_and(old_package)) ) - if self.feature_registry.is_feature_enabled(new_feature): + if feature_enabled: + self._systemctl_action(new_package, 'enable') + exits.callback(rollback(self._systemctl_action, + old_package, 'disable')) self._systemctl_action(new_package, 'start') exits.callback(rollback(self._systemctl_action, new_package, 'stop')) + # Update feature configuration after we have started new service. + # If we place it before the above, our service start/stop will + # interfere with hostcfgd in rollback path leading to a service + # running with new image and not the old one. 
+ self.feature_registry.update(old_package.manifest, new_package.manifest) + exits.callback(rollback( + self.feature_registry.update, new_package.manifest, old_package.manifest) + ) + if not skip_host_plugins: self._install_cli_plugins(new_package) exits.callback(rollback(self._uninstall_cli_plugin, old_package)) @@ -613,16 +669,16 @@ def migrate_packages(self, old_package_database: PackageDatabase, dockerd_sock: Optional[str] = None): """ - Migrate packages from old database. This function can do a comparison between - current database and the database passed in as argument. If the package is - missing in the current database it will be added. If the package is installed - in the passed database and in the current it is not installed it will be - installed with a passed database package version. If the package is installed - in the passed database and it is installed in the current database but with - older version the package will be upgraded to the never version. If the package - is installed in the passed database and in the current it is installed but with - never version - no actions are taken. If dockerd_sock parameter is passed, the - migration process will use loaded images from docker library of the currently + Migrate packages from old database. This function can do a comparison between + current database and the database passed in as argument. If the package is + missing in the current database it will be added. If the package is installed + in the passed database and in the current it is not installed it will be + installed with a passed database package version. If the package is installed + in the passed database and it is installed in the current database but with + older version the package will be upgraded to the never version. If the package + is installed in the passed database and in the current it is installed but with + never version - no actions are taken. If dockerd_sock parameter is passed, the + migration process will use loaded images from docker library of the currently installed image. Args: @@ -743,7 +799,7 @@ def get_package_source(self, ref = parse_reference_expression(package_expression) return self.get_package_source(package_ref=ref) elif repository_reference: - repo_ref = DockerReference.parse(repository_reference) + repo_ref = utils.DockerReference.parse(repository_reference) repository = repo_ref['name'] reference = repo_ref['tag'] or repo_ref['digest'] reference = reference or 'latest' @@ -774,8 +830,8 @@ def get_package_source(self, if package_entry.default_reference is not None: package_ref.reference = package_entry.default_reference else: - raise PackageManagerError(f'No default reference tag. ' - f'Please specify the version or tag explicitly') + raise PackageManagerError('No default reference tag. ' + 'Please specify the version or tag explicitly') return RegistrySource(package_entry.repository, package_ref.reference, @@ -847,7 +903,7 @@ def get_installed_packages_list(self) -> List[Package]: Installed packages dictionary. 
""" - return [self.get_installed_package(entry.name) + return [self.get_installed_package(entry.name) for entry in self.database if entry.installed] def _migrate_package_database(self, old_package_database: PackageDatabase): @@ -906,38 +962,26 @@ def _systemctl_action(self, package: Package, action: str): for npu in range(self.num_npus): run_command(f'systemctl {action} {name}@{npu}') - @staticmethod - def _get_cli_plugin_name(package: Package): - return utils.make_python_identifier(package.name) + '.py' - - @classmethod - def _get_cli_plugin_path(cls, package: Package, command): - pkg_loader = pkgutil.get_loader(f'{command}.plugins') - if pkg_loader is None: - raise PackageManagerError(f'Failed to get plugins path for {command} CLI') - plugins_pkg_path = os.path.dirname(pkg_loader.path) - return os.path.join(plugins_pkg_path, cls._get_cli_plugin_name(package)) - def _install_cli_plugins(self, package: Package): - for command in ('show', 'config', 'clear'): + for command in SONIC_CLI_COMMANDS: self._install_cli_plugin(package, command) def _uninstall_cli_plugins(self, package: Package): - for command in ('show', 'config', 'clear'): + for command in SONIC_CLI_COMMANDS: self._uninstall_cli_plugin(package, command) def _install_cli_plugin(self, package: Package, command: str): image_plugin_path = package.manifest['cli'][command] if not image_plugin_path: return - host_plugin_path = self._get_cli_plugin_path(package, command) + host_plugin_path = get_cli_plugin_path(package, command) self.docker.extract(package.entry.image_id, image_plugin_path, host_plugin_path) def _uninstall_cli_plugin(self, package: Package, command: str): image_plugin_path = package.manifest['cli'][command] if not image_plugin_path: return - host_plugin_path = self._get_cli_plugin_path(package, command) + host_plugin_path = get_cli_plugin_path(package, command) if os.path.exists(host_plugin_path): os.remove(host_plugin_path) @@ -949,12 +993,21 @@ def get_manager() -> 'PackageManager': PackageManager """ - docker_api = DockerApi(docker.from_env()) + docker_api = DockerApi(docker.from_env(), ProgressManager()) registry_resolver = RegistryResolver() - return PackageManager(DockerApi(docker.from_env(), ProgressManager()), + metadata_resolver = MetadataResolver(docker_api, registry_resolver) + cfg_mgmt = config_mgmt.ConfigMgmt(source=INIT_CFG_JSON) + cli_generator = CliGenerator(log) + feature_registry = FeatureRegistry(SonicDB) + service_creator = ServiceCreator(feature_registry, + SonicDB, + cli_generator, + cfg_mgmt) + + return PackageManager(docker_api, registry_resolver, PackageDatabase.from_file(), - MetadataResolver(docker_api, registry_resolver), - ServiceCreator(FeatureRegistry(SonicDB), SonicDB), + metadata_resolver, + service_creator, device_info, filelock.FileLock(PACKAGE_MANAGER_LOCK_FILE, timeout=0)) diff --git a/sonic_package_manager/manifest.py b/sonic_package_manager/manifest.py index c126e2eef1..216baef756 100644 --- a/sonic_package_manager/manifest.py +++ b/sonic_package_manager/manifest.py @@ -205,7 +205,9 @@ def unmarshal(self, value): ManifestField('mandatory', DefaultMarshaller(bool), False), ManifestField('show', DefaultMarshaller(str), ''), ManifestField('config', DefaultMarshaller(str), ''), - ManifestField('clear', DefaultMarshaller(str), '') + ManifestField('clear', DefaultMarshaller(str), ''), + ManifestField('auto-generate-show', DefaultMarshaller(bool), False), + ManifestField('auto-generate-config', DefaultMarshaller(bool), False), ]) ]) diff --git a/sonic_package_manager/metadata.py 
b/sonic_package_manager/metadata.py index 7f7c25ceaf..dc718375ed 100644 --- a/sonic_package_manager/metadata.py +++ b/sonic_package_manager/metadata.py @@ -4,7 +4,7 @@ import json import tarfile -from typing import Dict +from typing import Dict, Optional from sonic_package_manager.errors import MetadataError from sonic_package_manager.manifest import Manifest @@ -24,10 +24,10 @@ def deep_update(dst: Dict, src: Dict) -> Dict: for key, value in src.items(): if isinstance(value, dict): - node = dst.setdefault(key, {}) - deep_update(node, value) + node = dst.setdefault(key, {}) + deep_update(node, value) else: - dst[key] = value + dst[key] = value return dst @@ -73,6 +73,7 @@ class Metadata: manifest: Manifest components: Dict[str, Version] = field(default_factory=dict) + yang_module_str: Optional[str] = None class MetadataResolver: @@ -182,4 +183,6 @@ def from_labels(cls, labels: Dict[str, str]) -> Metadata: except ValueError as err: raise MetadataError(f'Failed to parse component version: {err}') - return Metadata(Manifest.marshal(manifest_dict), components) + yang_module_str = sonic_metadata.get('yang-module') + + return Metadata(Manifest.marshal(manifest_dict), components, yang_module_str) diff --git a/sonic_package_manager/registry.py b/sonic_package_manager/registry.py index 8a09d9136e..8c03b078d2 100644 --- a/sonic_package_manager/registry.py +++ b/sonic_package_manager/registry.py @@ -38,7 +38,7 @@ def get_token(realm, service, scope) -> str: response = requests.get(f'{realm}?scope={scope}&service={service}') if response.status_code != requests.codes.ok: - raise AuthenticationServiceError(f'Failed to retrieve token') + raise AuthenticationServiceError('Failed to retrieve token') content = json.loads(response.content) token = content['token'] diff --git a/sonic_package_manager/service_creator/__init__.py b/sonic_package_manager/service_creator/__init__.py index e2af81ceb5..b0f4a24086 100644 --- a/sonic_package_manager/service_creator/__init__.py +++ b/sonic_package_manager/service_creator/__init__.py @@ -1,3 +1,4 @@ #!/usr/bin/env python ETC_SONIC_PATH = '/etc/sonic' +SONIC_CLI_COMMANDS = ('show', 'config', 'clear') diff --git a/sonic_package_manager/service_creator/creator.py b/sonic_package_manager/service_creator/creator.py index 4c618eb7ea..91f0f6102c 100644 --- a/sonic_package_manager/service_creator/creator.py +++ b/sonic_package_manager/service_creator/creator.py @@ -5,18 +5,27 @@ import stat import subprocess from collections import defaultdict -from typing import Dict +from typing import Dict, Type import jinja2 as jinja2 +from config.config_mgmt import ConfigMgmt from prettyprinter import pformat from toposort import toposort_flatten, CircularDependencyError +from config.config_mgmt import sonic_cfggen +from sonic_cli_gen.generator import CliGenerator + from sonic_package_manager.logger import log from sonic_package_manager.package import Package -from sonic_package_manager.service_creator import ETC_SONIC_PATH +from sonic_package_manager.service_creator import ( + ETC_SONIC_PATH, + SONIC_CLI_COMMANDS, +) from sonic_package_manager.service_creator.feature import FeatureRegistry +from sonic_package_manager.service_creator.sonic_db import SonicDB from sonic_package_manager.service_creator.utils import in_chroot + SERVICE_FILE_TEMPLATE = 'sonic.service.j2' TIMER_UNIT_TEMPLATE = 'timer.unit.j2' @@ -78,12 +87,22 @@ def set_executable_bit(filepath): os.chmod(filepath, st.st_mode | stat.S_IEXEC) +def remove_if_exists(path): + """ Remove filepath if it exists """ + + if not 
os.path.exists(path): + return + + os.remove(path) + log.info(f'removed {path}') + + def run_command(command: str): """ Run arbitrary bash command. Args: command: String command to execute as bash script Raises: - PackageManagerError: Raised when the command return code + ServiceCreatorError: Raised when the command return code is not 0. """ @@ -104,24 +123,30 @@ class ServiceCreator: def __init__(self, feature_registry: FeatureRegistry, - sonic_db): + sonic_db: Type[SonicDB], + cli_gen: CliGenerator, + cfg_mgmt: ConfigMgmt): """ Initialize ServiceCreator with: - + Args: feature_registry: FeatureRegistry object. - sonic_db: SonicDb interface. + sonic_db: SonicDB interface. + cli_gen: CliGenerator instance. + cfg_mgmt: ConfigMgmt instance. """ self.feature_registry = feature_registry self.sonic_db = sonic_db + self.cli_gen = cli_gen + self.cfg_mgmt = cfg_mgmt def create(self, package: Package, register_feature: bool = True, state: str = 'enabled', owner: str = 'local'): - """ Register package as SONiC service. - + """ Register package as SONiC service. + Args: package: Package object to install. register_feature: Wether to register this package in FEATURE table. @@ -139,54 +164,54 @@ def create(self, self.generate_systemd_service(package) self.generate_dump_script(package) self.generate_service_reconciliation_file(package) - + self.install_yang_module(package) self.set_initial_config(package) + self.install_autogen_cli_all(package) self._post_operation_hook() if register_feature: - self.feature_registry.register(package.manifest, - state, owner) + self.feature_registry.register(package.manifest, state, owner) except (Exception, KeyboardInterrupt): - self.remove(package, register_feature) + self.remove(package, deregister_feature=register_feature) raise def remove(self, package: Package, - deregister_feature: bool = True): + deregister_feature: bool = True, + keep_config: bool = False): """ Uninstall SONiC service provided by the package. - + Args: package: Package object to uninstall. deregister_feature: Wether to deregister this package from FEATURE table. + keep_config: Whether to remove package configuration. 
Returns: None """ name = package.manifest['service']['name'] + remove_if_exists(os.path.join(SYSTEMD_LOCATION, f'{name}.service')) + remove_if_exists(os.path.join(SYSTEMD_LOCATION, f'{name}@.service')) + remove_if_exists(os.path.join(SERVICE_MGMT_SCRIPT_LOCATION, f'{name}.sh')) + remove_if_exists(os.path.join(DOCKER_CTL_SCRIPT_LOCATION, f'{name}.sh')) + remove_if_exists(os.path.join(DEBUG_DUMP_SCRIPT_LOCATION, f'{name}')) + remove_if_exists(os.path.join(ETC_SONIC_PATH, f'{name}_reconcile')) + self.update_dependent_list_file(package, remove=True) - def remove_file(path): - if os.path.exists(path): - os.remove(path) - log.info(f'removed {path}') - - remove_file(os.path.join(SYSTEMD_LOCATION, f'{name}.service')) - remove_file(os.path.join(SYSTEMD_LOCATION, f'{name}@.service')) - remove_file(os.path.join(SERVICE_MGMT_SCRIPT_LOCATION, f'{name}.sh')) - remove_file(os.path.join(DOCKER_CTL_SCRIPT_LOCATION, f'{name}.sh')) - remove_file(os.path.join(DEBUG_DUMP_SCRIPT_LOCATION, f'{name}')) - remove_file(os.path.join(ETC_SONIC_PATH, f'{name}_reconcile')) + if deregister_feature and not keep_config: + self.remove_config(package) - self.update_dependent_list_file(package, remove=True) + self.uninstall_autogen_cli_all(package) + self.uninstall_yang_module(package) self._post_operation_hook() if deregister_feature: self.feature_registry.deregister(package.manifest['service']['name']) - self.remove_config(package) def generate_container_mgmt(self, package: Package): - """ Generates container management script under /usr/bin/.sh for package. - + """ Generates container management script under /usr/bin/.sh for package. + Args: package: Package object to generate script for. Returns: @@ -228,8 +253,8 @@ def generate_container_mgmt(self, package: Package): log.info(f'generated {script_path}') def generate_service_mgmt(self, package: Package): - """ Generates service management script under /usr/local/bin/.sh for package. - + """ Generates service management script under /usr/local/bin/.sh for package. + Args: package: Package object to generate script for. Returns: @@ -249,8 +274,8 @@ def generate_service_mgmt(self, package: Package): log.info(f'generated {script_path}') def generate_systemd_service(self, package: Package): - """ Generates systemd service(s) file and timer(s) (if needed) for package. - + """ Generates systemd service(s) file and timer(s) (if needed) for package. + Args: package: Package object to generate service for. Returns: @@ -297,13 +322,13 @@ def generate_systemd_service(self, package: Package): def update_dependent_list_file(self, package: Package, remove=False): """ This function updates dependent list file for packages listed in "dependent-of" (path: /etc/sonic/_dependent file). - Args: package: Package to update packages dependent of it. + remove: True if update for removal process. Returns: None. - """ + name = package.manifest['service']['name'] dependent_of = package.manifest['service']['dependent-of'] host_service = package.manifest['service']['host-service'] @@ -337,7 +362,6 @@ def update_dependent(service, name, multi_inst): def generate_dump_script(self, package): """ Generates dump plugin script for package. - Args: package: Package object to generate dump plugin script for. Returns: @@ -363,7 +387,7 @@ def generate_dump_script(self, package): def get_shutdown_sequence(self, reboot_type: str, packages: Dict[str, Package]): """ Returns shutdown sequence file for particular reboot type. - + Args: reboot_type: Reboot type to generated service shutdown sequence for. 
packages: Dict of installed packages. @@ -410,7 +434,7 @@ def filter_not_available(services): def generate_shutdown_sequence_file(self, reboot_type: str, packages: Dict[str, Package]): """ Generates shutdown sequence file for particular reboot type (path: /etc/sonic/-reboot_order). - + Args: reboot_type: Reboot type to generated service shutdown sequence for. packages: Dict of installed packages. @@ -421,11 +445,11 @@ def generate_shutdown_sequence_file(self, reboot_type: str, packages: Dict[str, order = self.get_shutdown_sequence(reboot_type, packages) with open(os.path.join(ETC_SONIC_PATH, f'{reboot_type}-reboot_order'), 'w') as file: file.write(' '.join(order)) - + def generate_shutdown_sequence_files(self, packages: Dict[str, Package]): - """ Generates shutdown sequence file for fast and warm reboot. + """ Generates shutdown sequence file for fast and warm reboot. (path: /etc/sonic/-reboot_order). - + Args: packages: Dict of installed packages. Returns: @@ -462,27 +486,18 @@ def set_initial_config(self, package): """ init_cfg = package.manifest['package']['init-cfg'] + if not init_cfg: + return - for tablename, content in init_cfg.items(): - if not isinstance(content, dict): - continue - - tables = self._get_tables(tablename) - - for key in content: - for table in tables: - cfg = content[key] - exists, old_fvs = table.get(key) - if exists: - cfg.update(old_fvs) - fvs = list(cfg.items()) - table.set(key, fvs) + for conn in self.sonic_db.get_connectors(): + cfg = conn.get_config() + new_cfg = init_cfg.copy() + sonic_cfggen.deep_update(new_cfg, cfg) + self.validate_config(new_cfg) + conn.mod_config(new_cfg) def remove_config(self, package): - """ Remove configuration based on init-cfg tables, so having - init-cfg even with tables without keys might be a good idea. - TODO: init-cfg should be validated with yang model - TODO: remove config from tables known to yang model + """ Remove configuration based on package YANG module. Args: package: Package object remove initial configuration for. @@ -490,36 +505,131 @@ def remove_config(self, package): None """ - init_cfg = package.manifest['package']['init-cfg'] + if not package.metadata.yang_module_str: + return - for tablename, content in init_cfg.items(): - if not isinstance(content, dict): + module_name = self.cfg_mgmt.get_module_name(package.metadata.yang_module_str) + for tablename, module in self.cfg_mgmt.sy.confDbYangMap.items(): + if module.get('module') != module_name: continue - tables = self._get_tables(tablename) + for conn in self.sonic_db.get_connectors(): + keys = conn.get_table(tablename).keys() + for key in keys: + conn.set_entry(tablename, key, None) + + def validate_config(self, config): + """ Validate configuration through YANG. + + Args: + config: Config DB data. + Returns: + None. + Raises: + Exception: if config does not pass YANG validation. + """ + + config = sonic_cfggen.FormatConverter.to_serialized(config) + log.debug(f'validating configuration {pformat(config)}') + # This will raise exception if configuration is not valid. + # NOTE: loadData() modifies the state of ConfigMgmt instance. + # This is not desired for configuration validation only purpose. + # Although the config loaded into ConfigMgmt instance is not + # interesting in this application so we don't care. + self.cfg_mgmt.loadData(config) + + def install_yang_module(self, package: Package): + """ Install package's yang module in the system. + + Args: + package: Package object. 
+ Returns: + None + """ + + if not package.metadata.yang_module_str: + return - for key in content: - for table in tables: - table._del(key) + self.cfg_mgmt.add_module(package.metadata.yang_module_str) - def _get_tables(self, table_name): - """ Return swsscommon Tables for all kinds of configuration DBs """ + def uninstall_yang_module(self, package: Package): + """ Uninstall package's yang module in the system. - tables = [] + Args: + package: Package object. + Returns: + None + """ - running_table = self.sonic_db.running_table(table_name) - if running_table is not None: - tables.append(running_table) + if not package.metadata.yang_module_str: + return - persistent_table = self.sonic_db.persistent_table(table_name) - if persistent_table is not None: - tables.append(persistent_table) + module_name = self.cfg_mgmt.get_module_name(package.metadata.yang_module_str) + self.cfg_mgmt.remove_module(module_name) - initial_table = self.sonic_db.initial_table(table_name) - if initial_table is not None: - tables.append(initial_table) + def install_autogen_cli_all(self, package: Package): + """ Install autogenerated CLI plugins for package. + + Args: + package: Package + Returns: + None + """ + + for command in SONIC_CLI_COMMANDS: + self.install_autogen_cli(package, command) + + def uninstall_autogen_cli_all(self, package: Package): + """ Remove autogenerated CLI plugins for package. + + Args: + package: Package + Returns: + None + """ - return tables + for command in SONIC_CLI_COMMANDS: + self.uninstall_autogen_cli(package, command) + + def install_autogen_cli(self, package: Package, command: str): + """ Install autogenerated CLI plugins for package for particular command. + + Args: + package: Package. + command: Name of command to generate CLI for. + Returns: + None + """ + + if package.metadata.yang_module_str is None: + return + if f'auto-generate-{command}' not in package.manifest['cli']: + return + if not package.manifest['cli'][f'auto-generate-{command}']: + return + module_name = self.cfg_mgmt.get_module_name(package.metadata.yang_module_str) + self.cli_gen.generate_cli_plugin(command, module_name) + log.debug(f'{command} command line interface autogenerated for {module_name}') + + def uninstall_autogen_cli(self, package: Package, command: str): + """ Uninstall autogenerated CLI plugins for package for particular command. + + Args: + package: Package. + command: Name of command to remove CLI. + Returns: + None + """ + + if package.metadata.yang_module_str is None: + return + if f'auto-generate-{command}' not in package.manifest['cli']: + return + if not package.manifest['cli'][f'auto-generate-{command}']: + return + module_name = self.cfg_mgmt.get_module_name(package.metadata.yang_module_str) + self.cli_gen.remove_cli_plugin(command, module_name) + log.debug(f'{command} command line interface removed for {module_name}') def _post_operation_hook(self): """ Common operations executed after service is created/removed. 
""" diff --git a/sonic_package_manager/service_creator/feature.py b/sonic_package_manager/service_creator/feature.py index 4df06384d2..eb8e1a0710 100644 --- a/sonic_package_manager/service_creator/feature.py +++ b/sonic_package_manager/service_creator/feature.py @@ -16,6 +16,14 @@ } +def is_enabled(cfg): + return cfg.get('state', 'disabled').lower() == 'enabled' + + +def is_multi_instance(cfg): + return str(cfg.get('has_per_asic_scope', 'False')).lower() == 'true' + + class FeatureRegistry: """ FeatureRegistry class provides an interface to register/de-register new feature persistently. """ @@ -27,51 +35,93 @@ def register(self, manifest: Manifest, state: str = 'disabled', owner: str = 'local'): + """ Register feature in CONFIG DBs. + + Args: + manifest: Feature's manifest. + state: Desired feature admin state. + owner: Owner of this feature (kube/local). + Returns: + None. + """ + name = manifest['service']['name'] - for table in self._get_tables(): - cfg_entries = self.get_default_feature_entries(state, owner) - non_cfg_entries = self.get_non_configurable_feature_entries(manifest) + db_connectors = self._sonic_db.get_connectors() + cfg_entries = self.get_default_feature_entries(state, owner) + non_cfg_entries = self.get_non_configurable_feature_entries(manifest) - exists, current_cfg = table.get(name) + for conn in db_connectors: + current_cfg = conn.get_entry(FEATURE, name) new_cfg = cfg_entries.copy() # Override configurable entries with CONFIG DB data. - new_cfg = {**new_cfg, **dict(current_cfg)} + new_cfg = {**new_cfg, **current_cfg} # Override CONFIG DB data with non configurable entries. new_cfg = {**new_cfg, **non_cfg_entries} - table.set(name, list(new_cfg.items())) + conn.set_entry(FEATURE, name, new_cfg) def deregister(self, name: str): - for table in self._get_tables(): - table._del(name) + """ Deregister feature by name. + + Args: + name: Name of the feature in CONFIG DB. + Returns: + None + """ + + db_connetors = self._sonic_db.get_connectors() + for conn in db_connetors: + conn.set_entry(FEATURE, name, None) + + def update(self, + old_manifest: Manifest, + new_manifest: Manifest): + """ Migrate feature configuration. It can be that non-configurable + feature entries have to be updated. e.g: "has_timer" for example if + the new feature introduces a service timer or name of the service has + changed, but user configurable entries are not changed). + + Args: + old_manifest: Old feature manifest. + new_manifest: New feature manifest. + Returns: + None + """ + + old_name = old_manifest['service']['name'] + new_name = new_manifest['service']['name'] + db_connectors = self._sonic_db.get_connectors() + non_cfg_entries = self.get_non_configurable_feature_entries(new_manifest) + + for conn in db_connectors: + current_cfg = conn.get_entry(FEATURE, old_name) + conn.set_entry(FEATURE, old_name, None) + + new_cfg = current_cfg.copy() + # Override CONFIG DB data with non configurable entries. + new_cfg = {**new_cfg, **non_cfg_entries} + + conn.set_entry(FEATURE, new_name, new_cfg) def is_feature_enabled(self, name: str) -> bool: """ Returns whether the feature is current enabled or not. Accesses running CONFIG DB. If no running CONFIG_DB table is found in tables returns False. 
""" - running_db_table = self._sonic_db.running_table(FEATURE) - if running_db_table is None: + conn = self._sonic_db.get_running_db_connector() + if conn is None: return False - exists, cfg = running_db_table.get(name) - if not exists: - return False - cfg = dict(cfg) - return cfg.get('state').lower() == 'enabled' + cfg = conn.get_entry(FEATURE, name) + return is_enabled(cfg) def get_multi_instance_features(self): - res = [] - init_db_table = self._sonic_db.initial_table(FEATURE) - for feature in init_db_table.keys(): - exists, cfg = init_db_table.get(feature) - assert exists - cfg = dict(cfg) - asic_flag = str(cfg.get('has_per_asic_scope', 'False')) - if asic_flag.lower() == 'true': - res.append(feature) - return res + """ Returns a list of features which run in asic namespace. """ + + conn = self._sonic_db.get_initial_db_connector() + features = conn.get_table(FEATURE) + return [feature for feature, cfg in features.items() if is_multi_instance(cfg)] @staticmethod def get_default_feature_entries(state=None, owner=None) -> Dict[str, str]: @@ -94,15 +144,3 @@ def get_non_configurable_feature_entries(manifest) -> Dict[str, str]: 'has_global_scope': str(manifest['service']['host-service']), 'has_timer': str(manifest['service']['delayed']), } - - def _get_tables(self): - tables = [] - running = self._sonic_db.running_table(FEATURE) - if running is not None: # it's Ok if there is no database container running - tables.append(running) - persistent = self._sonic_db.persistent_table(FEATURE) - if persistent is not None: # it's Ok if there is no config_db.json - tables.append(persistent) - tables.append(self._sonic_db.initial_table(FEATURE)) # init_cfg.json is must - - return tables diff --git a/sonic_package_manager/service_creator/sonic_db.py b/sonic_package_manager/service_creator/sonic_db.py index a064c60c4a..6b617cb802 100644 --- a/sonic_package_manager/service_creator/sonic_db.py +++ b/sonic_package_manager/service_creator/sonic_db.py @@ -6,6 +6,8 @@ from swsscommon import swsscommon +from config.config_mgmt import sonic_cfggen + from sonic_package_manager.service_creator import ETC_SONIC_PATH from sonic_package_manager.service_creator.utils import in_chroot @@ -14,46 +16,74 @@ INIT_CFG_JSON = os.path.join(ETC_SONIC_PATH, 'init_cfg.json') -class FileDbTable: - """ swsscommon.Table adapter for persistent DBs. """ - - def __init__(self, file, table): - self._file = file - self._table = table +class PersistentConfigDbConnector: + """ This class implements swsscommon.ConfigDBConnector methods for persistent DBs (JSON files). + For method description refer to swsscommon.ConfigDBConnector. 
+ """ - def keys(self): - with open(self._file) as stream: - config = json.load(stream) - return config.get(self._table, {}).keys() + def __init__(self, filepath): + self._filepath = filepath - def get(self, key): - with open(self._file) as stream: - config = json.load(stream) - - table = config.get(self._table, {}) - exists = key in table - fvs_dict = table.get(key, {}) - fvs = list(fvs_dict.items()) - return exists, fvs - - def set(self, key, fvs): - with open(self._file) as stream: - config = json.load(stream) - - table = config.setdefault(self._table, {}) - table.update({key: dict(fvs)}) - - with open(self._file, 'w') as stream: - json.dump(config, stream, indent=4) - - def _del(self, key): - with open(self._file) as stream: + def get_config(self): + with open(self._filepath) as stream: config = json.load(stream) + config = sonic_cfggen.FormatConverter.to_deserialized(config) + return config + + def get_entry(self, table, key): + table = table.upper() + table_data = self.get_table(table) + return table_data.get(key, {}) + + def get_table(self, table): + table = table.upper() + config = self.get_config() + return config.get(table, {}) + + def set_entry(self, table, key, data): + table = table.upper() + config = self.get_config() + if data is None: + self._del_key(config, table, key) + else: + table_data = config.setdefault(table, {}) + table_data[key] = data + self._write_config(config) + + def mod_entry(self, table, key, data): + table = table.upper() + config = self.get_config() + if data is None: + self._del_key(config, table, key) + else: + table_data = config.setdefault(table, {}) + curr_data = table_data.setdefault(key, {}) + curr_data.update(data) + self._write_config(config) + + def mod_config(self, config): + for table_name in config: + table_data = config[table_name] + if table_data is None: + self._del_table(config, table_name) + continue + for key in table_data: + self.mod_entry(table_name, key, table_data[key]) + + def _del_table(self, config, table): + with contextlib.suppress(KeyError): + config.pop(table) + def _del_key(self, config, table, key): with contextlib.suppress(KeyError): - config[self._table].pop(key) + config[table].pop(key) + + if not config[table]: + self._del_table(config, table) - with open(self._file, 'w') as stream: + def _write_config(self, config): + config = sonic_cfggen.FormatConverter.to_serialized(config) + with open(self._filepath, 'w') as stream: json.dump(config, stream, indent=4) @@ -62,37 +92,52 @@ class SonicDB: running DB and also for persistent and initial configs. """ - _running = None + _running_db_conn = None + + @classmethod + def get_connectors(cls): + """ Yields available DBs connectors. """ + + initial_db_conn = cls.get_initial_db_connector() + persistent_db_conn = cls.get_persistent_db_connector() + running_db_conn = cls.get_running_db_connector() + + yield initial_db_conn + if persistent_db_conn is not None: + yield persistent_db_conn + if running_db_conn is not None: + yield running_db_conn @classmethod - def running_table(cls, table): - """ Returns running DB table. """ + def get_running_db_connector(cls): + """ Returns running DB connector. """ # In chroot we can connect to a running # DB via TCP socket, we should ignore this case. if in_chroot(): return None - if cls._running is None: + if cls._running_db_conn is None: try: - cls._running = swsscommon.DBConnector(CONFIG_DB, 0) + cls._running_db_conn = swsscommon.ConfigDBConnector() + cls._running_db_conn.connect() except RuntimeError: # Failed to connect to DB. 
- return None + cls._running_db_conn = None - return swsscommon.Table(cls._running, table) + return cls._running_db_conn @classmethod - def persistent_table(cls, table): - """ Returns persistent DB table. """ + def get_persistent_db_connector(cls): + """ Returns persistent DB connector. """ if not os.path.exists(CONFIG_DB_JSON): return None - return FileDbTable(CONFIG_DB_JSON, table) + return PersistentConfigDbConnector(CONFIG_DB_JSON) @classmethod - def initial_table(cls, table): - """ Returns initial DB table. """ + def get_initial_db_connector(cls): + """ Returns initial DB connector. """ - return FileDbTable(INIT_CFG_JSON, table) + return PersistentConfigDbConnector(INIT_CFG_JSON) diff --git a/tests/sonic_package_manager/conftest.py b/tests/sonic_package_manager/conftest.py index 2788a75cd3..1ec067657c 100644 --- a/tests/sonic_package_manager/conftest.py +++ b/tests/sonic_package_manager/conftest.py @@ -7,6 +7,8 @@ import pytest from docker_image.reference import Reference +from config.config_mgmt import ConfigMgmt + from sonic_package_manager.database import PackageDatabase, PackageEntry from sonic_package_manager.manager import DockerApi, PackageManager from sonic_package_manager.manifest import Manifest @@ -62,7 +64,17 @@ def mock_service_creator(): @pytest.fixture def mock_sonic_db(): - yield Mock() + yield MagicMock() + + +@pytest.fixture +def mock_config_mgmt(): + yield MagicMock() + + +@pytest.fixture +def mock_cli_gen(): + yield MagicMock() @pytest.fixture @@ -107,7 +119,7 @@ def __init__(self): 'before': ['swss'], } ) - self.add('Azure/docker-test', '1.6.0', 'test-package', '1.6.0') + self.add('Azure/docker-test', '1.6.0', 'test-package', '1.6.0', yang='TEST') self.add('Azure/docker-test-2', '1.5.0', 'test-package-2', '1.5.0') self.add('Azure/docker-test-2', '2.0.0', 'test-package-2', '2.0.0') self.add('Azure/docker-test-3', 'latest', 'test-package-3', '1.6.0') @@ -124,23 +136,26 @@ def __init__(self): def from_registry(self, repository: str, reference: str): manifest = Manifest.marshal(self.metadata_store[repository][reference]['manifest']) components = self.metadata_store[repository][reference]['components'] - return Metadata(manifest, components) + yang = self.metadata_store[repository][reference]['yang'] + return Metadata(manifest, components, yang) def from_local(self, image: str): ref = Reference.parse(image) manifest = Manifest.marshal(self.metadata_store[ref['name']][ref['tag']]['manifest']) components = self.metadata_store[ref['name']][ref['tag']]['components'] - return Metadata(manifest, components) + yang = self.metadata_store[ref['name']][ref['tag']]['yang'] + return Metadata(manifest, components, yang) def from_tarball(self, filepath: str) -> Manifest: path, ref = filepath.split(':') manifest = Manifest.marshal(self.metadata_store[path][ref]['manifest']) components = self.metadata_store[path][ref]['components'] - return Metadata(manifest, components) + yang = self.metadata_store[path][ref]['yang'] + return Metadata(manifest, components, yang) def add(self, repo, reference, name, version, components=None, warm_shutdown=None, fast_shutdown=None, - processes=None): + processes=None, yang=None): repo_dict = self.metadata_store.setdefault(repo, {}) repo_dict[reference] = { 'manifest': { @@ -157,6 +172,7 @@ def add(self, repo, reference, name, version, components=None, 'processes': processes or [], }, 'components': components or {}, + 'yang': yang, } yield FakeMetadataResolver() @@ -252,7 +268,7 @@ def fake_db(fake_metadata_resolver): description='SONiC Package Manager 
Test Package', default_reference='1.6.0', installed=False, - built_in=False + built_in=False, ) add_package( content, @@ -402,8 +418,8 @@ def sonic_fs(fs): @pytest.fixture(autouse=True) def patch_pkgutil(): - with mock.patch('pkgutil.get_loader'): - yield + with mock.patch('pkgutil.get_loader') as loader: + yield loader @pytest.fixture diff --git a/tests/sonic_package_manager/test_service_creator.py b/tests/sonic_package_manager/test_service_creator.py index ffa6737531..456cc71a4a 100644 --- a/tests/sonic_package_manager/test_service_creator.py +++ b/tests/sonic_package_manager/test_service_creator.py @@ -1,7 +1,8 @@ #!/usr/bin/env python import os -from unittest.mock import Mock, MagicMock +import copy +from unittest.mock import Mock, call import pytest @@ -59,13 +60,25 @@ def manifest(): }) -def test_service_creator(sonic_fs, manifest, package_manager, mock_feature_registry, mock_sonic_db): - creator = ServiceCreator(mock_feature_registry, mock_sonic_db) +@pytest.fixture() +def service_creator(mock_feature_registry, + mock_sonic_db, + mock_cli_gen, + mock_config_mgmt): + yield ServiceCreator( + mock_feature_registry, + mock_sonic_db, + mock_cli_gen, + mock_config_mgmt + ) + + +def test_service_creator(sonic_fs, manifest, service_creator, package_manager): entry = PackageEntry('test', 'azure/sonic-test') package = Package(entry, Metadata(manifest)) installed_packages = package_manager._get_installed_packages_and(package) - creator.create(package) - creator.generate_shutdown_sequence_files(installed_packages) + service_creator.create(package) + service_creator.generate_shutdown_sequence_files(installed_packages) assert sonic_fs.exists(os.path.join(ETC_SONIC_PATH, 'swss_dependent')) assert sonic_fs.exists(os.path.join(DOCKER_CTL_SCRIPT_LOCATION, 'test.sh')) @@ -81,122 +94,200 @@ def read_file(name): assert read_file('test_reconcile') == 'test-process test-process-3' -def test_service_creator_with_timer_unit(sonic_fs, manifest, mock_feature_registry, mock_sonic_db): - creator = ServiceCreator(mock_feature_registry, mock_sonic_db) +def test_service_creator_with_timer_unit(sonic_fs, manifest, service_creator): entry = PackageEntry('test', 'azure/sonic-test') package = Package(entry, Metadata(manifest)) - creator.create(package) + service_creator.create(package) assert not sonic_fs.exists(os.path.join(SYSTEMD_LOCATION, 'test.timer')) manifest['service']['delayed'] = True package = Package(entry, Metadata(manifest)) - creator.create(package) + service_creator.create(package) assert sonic_fs.exists(os.path.join(SYSTEMD_LOCATION, 'test.timer')) -def test_service_creator_with_debug_dump(sonic_fs, manifest, mock_feature_registry, mock_sonic_db): - creator = ServiceCreator(mock_feature_registry, mock_sonic_db) +def test_service_creator_with_debug_dump(sonic_fs, manifest, service_creator): entry = PackageEntry('test', 'azure/sonic-test') package = Package(entry, Metadata(manifest)) - creator.create(package) + service_creator.create(package) assert not sonic_fs.exists(os.path.join(DEBUG_DUMP_SCRIPT_LOCATION, 'test')) manifest['package']['debug-dump'] = '/some/command' package = Package(entry, Metadata(manifest)) - creator.create(package) + service_creator.create(package) assert sonic_fs.exists(os.path.join(DEBUG_DUMP_SCRIPT_LOCATION, 'test')) -def test_service_creator_initial_config(sonic_fs, manifest, mock_feature_registry, mock_sonic_db): - mock_table = Mock() - mock_table.get = Mock(return_value=(True, (('field_2', 'original_value_2'),))) - mock_sonic_db.initial_table = Mock(return_value=mock_table) 
- mock_sonic_db.persistent_table = Mock(return_value=mock_table) - mock_sonic_db.running_table = Mock(return_value=mock_table) +def test_service_creator_yang(sonic_fs, manifest, mock_sonic_db, + mock_config_mgmt, service_creator): + test_yang = 'TEST YANG' + test_yang_module = 'sonic-test' - creator = ServiceCreator(mock_feature_registry, mock_sonic_db) + mock_connector = Mock() + mock_sonic_db.get_connectors = Mock(return_value=[mock_connector]) + mock_connector.get_table = Mock(return_value={'key_a': {'field_1': 'value_1'}}) + mock_connector.get_config = Mock(return_value={ + 'TABLE_A': mock_connector.get_table('') + }) entry = PackageEntry('test', 'azure/sonic-test') - package = Package(entry, Metadata(manifest)) - creator.create(package) + package = Package(entry, Metadata(manifest, yang_module_str=test_yang)) + service_creator.create(package) - assert not sonic_fs.exists(os.path.join(DEBUG_DUMP_SCRIPT_LOCATION, 'test')) + mock_config_mgmt.add_module.assert_called_with(test_yang) + mock_config_mgmt.get_module_name = Mock(return_value=test_yang_module) manifest['package']['init-cfg'] = { 'TABLE_A': { 'key_a': { - 'field_1': 'value_1', + 'field_1': 'new_value_1', 'field_2': 'value_2' }, }, } - package = Package(entry, Metadata(manifest)) + package = Package(entry, Metadata(manifest, yang_module_str=test_yang)) + + service_creator.create(package) + + mock_config_mgmt.add_module.assert_called_with(test_yang) + + mock_connector.mod_config.assert_called_with( + { + 'TABLE_A': { + 'key_a': { + 'field_1': 'value_1', + 'field_2': 'value_2', + }, + }, + } + ) + + mock_config_mgmt.sy.confDbYangMap = { + 'TABLE_A': {'module': test_yang_module} + } + + service_creator.remove(package) + mock_connector.set_entry.assert_called_with('TABLE_A', 'key_a', None) + mock_config_mgmt.remove_module.assert_called_with(test_yang_module) - creator.create(package) - mock_table.set.assert_called_with('key_a', [('field_1', 'value_1'), - ('field_2', 'original_value_2')]) - creator.remove(package) - mock_table._del.assert_called_with('key_a') +def test_service_creator_autocli(sonic_fs, manifest, mock_cli_gen, + mock_config_mgmt, service_creator): + test_yang = 'TEST YANG' + test_yang_module = 'sonic-test' + + manifest['cli']['auto-generate-show'] = True + manifest['cli']['auto-generate-config'] = True + + entry = PackageEntry('test', 'azure/sonic-test') + package = Package(entry, Metadata(manifest, yang_module_str=test_yang)) + mock_config_mgmt.get_module_name = Mock(return_value=test_yang_module) + service_creator.create(package) + + mock_cli_gen.generate_cli_plugin.assert_has_calls( + [ + call('show', test_yang_module), + call('config', test_yang_module), + ], + any_order=True + ) + + service_creator.remove(package) + mock_cli_gen.remove_cli_plugin.assert_has_calls( + [ + call('show', test_yang_module), + call('config', test_yang_module), + ], + any_order=True + ) def test_feature_registration(mock_sonic_db, manifest): - mock_feature_table = Mock() - mock_feature_table.get = Mock(return_value=(False, ())) - mock_sonic_db.initial_table = Mock(return_value=mock_feature_table) - mock_sonic_db.persistent_table = Mock(return_value=mock_feature_table) - mock_sonic_db.running_table = Mock(return_value=mock_feature_table) + mock_connector = Mock() + mock_connector.get_entry = Mock(return_value={}) + mock_sonic_db.get_connectors = Mock(return_value=[mock_connector]) feature_registry = FeatureRegistry(mock_sonic_db) feature_registry.register(manifest) - mock_feature_table.set.assert_called_with('test', [ - ('state', 
'disabled'), - ('auto_restart', 'enabled'), - ('high_mem_alert', 'disabled'), - ('set_owner', 'local'), - ('has_per_asic_scope', 'False'), - ('has_global_scope', 'True'), - ('has_timer', 'False'), - ]) + mock_connector.set_entry.assert_called_with('FEATURE', 'test', { + 'state': 'disabled', + 'auto_restart': 'enabled', + 'high_mem_alert': 'disabled', + 'set_owner': 'local', + 'has_per_asic_scope': 'False', + 'has_global_scope': 'True', + 'has_timer': 'False', + }) + + +def test_feature_update(mock_sonic_db, manifest): + curr_feature_config = { + 'state': 'enabled', + 'auto_restart': 'enabled', + 'high_mem_alert': 'disabled', + 'set_owner': 'local', + 'has_per_asic_scope': 'False', + 'has_global_scope': 'True', + 'has_timer': 'False', + } + mock_connector = Mock() + mock_connector.get_entry = Mock(return_value=curr_feature_config) + mock_sonic_db.get_connectors = Mock(return_value=[mock_connector]) + feature_registry = FeatureRegistry(mock_sonic_db) + + new_manifest = copy.deepcopy(manifest) + new_manifest['service']['name'] = 'test_new' + new_manifest['service']['delayed'] = True + + feature_registry.update(manifest, new_manifest) + + mock_connector.set_entry.assert_has_calls([ + call('FEATURE', 'test', None), + call('FEATURE', 'test_new', { + 'state': 'enabled', + 'auto_restart': 'enabled', + 'high_mem_alert': 'disabled', + 'set_owner': 'local', + 'has_per_asic_scope': 'False', + 'has_global_scope': 'True', + 'has_timer': 'True', + }), + ], any_order=True) def test_feature_registration_with_timer(mock_sonic_db, manifest): manifest['service']['delayed'] = True - mock_feature_table = Mock() - mock_feature_table.get = Mock(return_value=(False, ())) - mock_sonic_db.initial_table = Mock(return_value=mock_feature_table) - mock_sonic_db.persistent_table = Mock(return_value=mock_feature_table) - mock_sonic_db.running_table = Mock(return_value=mock_feature_table) + mock_connector = Mock() + mock_connector.get_entry = Mock(return_value={}) + mock_sonic_db.get_connectors = Mock(return_value=[mock_connector]) feature_registry = FeatureRegistry(mock_sonic_db) feature_registry.register(manifest) - mock_feature_table.set.assert_called_with('test', [ - ('state', 'disabled'), - ('auto_restart', 'enabled'), - ('high_mem_alert', 'disabled'), - ('set_owner', 'local'), - ('has_per_asic_scope', 'False'), - ('has_global_scope', 'True'), - ('has_timer', 'True'), - ]) + mock_connector.set_entry.assert_called_with('FEATURE', 'test', { + 'state': 'disabled', + 'auto_restart': 'enabled', + 'high_mem_alert': 'disabled', + 'set_owner': 'local', + 'has_per_asic_scope': 'False', + 'has_global_scope': 'True', + 'has_timer': 'True', + }) def test_feature_registration_with_non_default_owner(mock_sonic_db, manifest): - mock_feature_table = Mock() - mock_feature_table.get = Mock(return_value=(False, ())) - mock_sonic_db.initial_table = Mock(return_value=mock_feature_table) - mock_sonic_db.persistent_table = Mock(return_value=mock_feature_table) - mock_sonic_db.running_table = Mock(return_value=mock_feature_table) + mock_connector = Mock() + mock_connector.get_entry = Mock(return_value={}) + mock_sonic_db.get_connectors = Mock(return_value=[mock_connector]) feature_registry = FeatureRegistry(mock_sonic_db) feature_registry.register(manifest, owner='kube') - mock_feature_table.set.assert_called_with('test', [ - ('state', 'disabled'), - ('auto_restart', 'enabled'), - ('high_mem_alert', 'disabled'), - ('set_owner', 'kube'), - ('has_per_asic_scope', 'False'), - ('has_global_scope', 'True'), - ('has_timer', 'False'), - ]) + 
mock_connector.set_entry.assert_called_with('FEATURE', 'test', { + 'state': 'disabled', + 'auto_restart': 'enabled', + 'high_mem_alert': 'disabled', + 'set_owner': 'kube', + 'has_per_asic_scope': 'False', + 'has_global_scope': 'True', + 'has_timer': 'False', + })
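
Illustration (not part of the change set): the patch replaces the old table-level FileDbTable/swsscommon.Table access with connector objects exposing get_entry/set_entry/get_table over both the running CONFIG DB and the JSON-backed persistent/initial configs. Below is a minimal, self-contained sketch of the deletion semantics the new PersistentConfigDbConnector relies on — passing None as the entry data removes the key, and a table left empty afterwards is pruned from the file. The JsonConfigStore class and the temp-file setup are simplified, hypothetical stand-ins for illustration only; the real connector additionally round-trips data through sonic_cfggen.FormatConverter.

import contextlib
import json
import tempfile


class JsonConfigStore:
    """Simplified, hypothetical stand-in for PersistentConfigDbConnector."""

    def __init__(self, filepath):
        self._filepath = filepath

    def _read(self):
        with open(self._filepath) as stream:
            return json.load(stream)

    def _write(self, config):
        with open(self._filepath, 'w') as stream:
            json.dump(config, stream, indent=4)

    def get_entry(self, table, key):
        # Missing table or key reads back as an empty entry.
        return self._read().get(table.upper(), {}).get(key, {})

    def set_entry(self, table, key, data):
        """Write an entry; data=None deletes the key and prunes an empty table."""
        table = table.upper()
        config = self._read()
        if data is None:
            with contextlib.suppress(KeyError):
                config[table].pop(key)
                if not config[table]:
                    del config[table]
        else:
            config.setdefault(table, {})[key] = data
        self._write(config)


if __name__ == '__main__':
    with tempfile.NamedTemporaryFile('w', suffix='.json', delete=False) as f:
        json.dump({}, f)

    store = JsonConfigStore(f.name)
    store.set_entry('FEATURE', 'test', {'state': 'disabled'})
    print(store.get_entry('FEATURE', 'test'))  # {'state': 'disabled'}
    store.set_entry('FEATURE', 'test', None)   # deregister: key and empty table removed
    print(store.get_entry('FEATURE', 'test'))  # {}

This mirrors how FeatureRegistry.deregister() and ServiceCreator.remove_config() in the patch clear entries by calling conn.set_entry(table, key, None) on every connector yielded by SonicDB.get_connectors().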