From 51c6afde97a8a29f2c4a15567daf789c66376fbf Mon Sep 17 00:00:00 2001 From: liuzhe-lz <40699903+liuzhe-lz@users.noreply.github.com> Date: Thu, 24 Jun 2021 05:41:16 +0800 Subject: [PATCH 1/8] change ipv4 detection method (#3860) Co-authored-by: liuzhe --- dependencies/required.txt | 1 - nni/tools/nnictl/config_schema.py | 9 --------- ts/nni_manager/common/utils.ts | 29 +++++++++++++---------------- 3 files changed, 13 insertions(+), 26 deletions(-) diff --git a/dependencies/required.txt b/dependencies/required.txt index 917b808377..820b945b8d 100644 --- a/dependencies/required.txt +++ b/dependencies/required.txt @@ -1,7 +1,6 @@ astor hyperopt == 0.1.2 json_tricks -netifaces psutil pyyaml requests diff --git a/nni/tools/nnictl/config_schema.py b/nni/tools/nnictl/config_schema.py index 65e645b560..16725a2dfc 100644 --- a/nni/tools/nnictl/config_schema.py +++ b/nni/tools/nnictl/config_schema.py @@ -5,7 +5,6 @@ import logging import os -import netifaces from schema import And, Optional, Or, Regex, Schema, SchemaError from nni.tools.package_utils import ( create_validator_instance, @@ -493,7 +492,6 @@ def validate_extras(self, experiment_config): self.validate_tuner_adivosr_assessor(experiment_config) self.validate_pai_trial_conifg(experiment_config) self.validate_kubeflow_operators(experiment_config) - self.validate_eth0_device(experiment_config) self.validate_hybrid_platforms(experiment_config) self.validate_frameworkcontroller_trial_config(experiment_config) @@ -599,13 +597,6 @@ def validate_pai_trial_conifg(self, experiment_config): print_warning(warning_information.format('outputDir')) self.validate_pai_config_path(experiment_config) - def validate_eth0_device(self, experiment_config): - '''validate whether the machine has eth0 device''' - if experiment_config.get('trainingServicePlatform') not in ['local'] \ - and not experiment_config.get('nniManagerIp') \ - and 'eth0' not in netifaces.interfaces(): - raise SchemaError('This machine does not contain eth0 network device, please set nniManagerIp in config file!') - def validate_hybrid_platforms(self, experiment_config): required_config_name_map = { 'remote': 'machineList', diff --git a/ts/nni_manager/common/utils.ts b/ts/nni_manager/common/utils.ts index 917eb3ba52..0debf97cb6 100644 --- a/ts/nni_manager/common/utils.ts +++ b/ts/nni_manager/common/utils.ts @@ -8,6 +8,7 @@ import { randomBytes } from 'crypto'; import * as cpp from 'child-process-promise'; import * as cp from 'child_process'; import { ChildProcess, spawn, StdioOptions } from 'child_process'; +import * as dgram from 'dgram'; import * as fs from 'fs'; import * as net from 'net'; import * as os from 'os'; @@ -217,28 +218,24 @@ function cleanupUnitTest(): void { setExperimentStartupInfo(true, 'unittest', 8080, 'unittest', undefined, logLevel); } -let cachedipv4Address: string = ''; +let cachedIpv4Address: string | null = null; + /** - * Get IPv4 address of current machine + * Get IPv4 address of current machine. */ function getIPV4Address(): string { - if (cachedipv4Address && cachedipv4Address.length > 0) { - return cachedipv4Address; + if (cachedIpv4Address !== null) { + return cachedIpv4Address; } - const networkInterfaces = os.networkInterfaces(); - if (networkInterfaces.eth0) { - for (const item of networkInterfaces.eth0) { - if (item.family === 'IPv4') { - cachedipv4Address = item.address; - return cachedipv4Address; - } - } - } else { - throw Error(`getIPV4Address() failed because os.networkInterfaces().eth0 is undefined. 
Please specify NNI manager IP in config.`); - } + // creates "udp connection" to a non-exist target, and get local address of the connection. + // since udp is connectionless, this does not send actual packets. + const socket = dgram.createSocket('udp4'); + socket.connect(1, '192.0.2.0'); + cachedIpv4Address = socket.address().address; + socket.close(); - throw Error('getIPV4Address() failed because no valid IPv4 address found.') + return cachedIpv4Address; } /** From e5d618093f872705bed274328370cd6245d16b94 Mon Sep 17 00:00:00 2001 From: Yuge Zhang Date: Fri, 25 Jun 2021 12:32:35 +0800 Subject: [PATCH 2/8] Integrate coverage report into CI (#3854) --- .gitignore | 6 ++ dependencies/develop.txt | 2 + dependencies/recommended.txt | 1 + nni/algorithms/__init__.py | 0 nni/algorithms/compression/__init__.py | 0 .../compression/pytorch/__init__.py | 0 .../pytorch/auto_compress/experiment.py | 5 +- .../pytorch/auto_compress/utils.py | 5 +- .../pytorch/pruning/iterative_pruner.py | 5 +- .../pytorch/pruning/sensitivity_pruner.py | 3 +- .../pytorch/quantization/quantizers.py | 5 +- .../feature_engineering/__init__.py | 0 nni/algorithms/hpo/__init__.py | 0 nni/algorithms/hpo/dngo_tuner.py | 5 +- nni/algorithms/hpo/hyperband_advisor.py | 9 ++- nni/algorithms/hpo/metis_tuner/metis_tuner.py | 4 +- .../networkmorphism_tuner.py | 2 +- nni/algorithms/nas/__init__.py | 0 nni/algorithms/nas/pytorch/__init__.py | 0 nni/algorithms/nas/pytorch/cream/trainer.py | 15 +++-- nni/algorithms/nas/pytorch/fbnet/trainer.py | 2 +- nni/algorithms/nas/pytorch/fbnet/utils.py | 3 +- nni/algorithms/nas/tensorflow/__init__.py | 0 .../nas/tensorflow/classic_nas/mutator.py | 2 + nni/algorithms/nas/tensorflow/enas/mutator.py | 2 + nni/common/__init__.py | 0 nni/experiment/launcher.py | 2 +- nni/runtime/__init__.py | 0 nni/runtime/log.py | 6 +- nni/runtime/msg_dispatcher.py | 5 +- nni/runtime/platform/local.py | 2 +- nni/runtime/platform/standalone.py | 3 +- nni/tools/__init__.py | 0 nni/tools/nnictl/launcher.py | 2 +- nni/tools/nnictl/nnictl_utils.py | 62 +++++++++++-------- pipelines/fast-test.yml | 22 +++++-- test/.coveragerc | 6 -- test/pytest.ini | 2 + ts/nni_manager/.gitignore | 7 ++- ts/nni_manager/package.json | 2 +- 40 files changed, 120 insertions(+), 77 deletions(-) create mode 100644 nni/algorithms/__init__.py create mode 100644 nni/algorithms/compression/__init__.py create mode 100644 nni/algorithms/compression/pytorch/__init__.py create mode 100644 nni/algorithms/feature_engineering/__init__.py create mode 100644 nni/algorithms/hpo/__init__.py create mode 100644 nni/algorithms/nas/__init__.py create mode 100644 nni/algorithms/nas/pytorch/__init__.py create mode 100644 nni/algorithms/nas/tensorflow/__init__.py create mode 100644 nni/common/__init__.py create mode 100644 nni/runtime/__init__.py create mode 100644 nni/tools/__init__.py create mode 100644 test/pytest.ini diff --git a/.gitignore b/.gitignore index 482d04e8c9..10e56b752d 100644 --- a/.gitignore +++ b/.gitignore @@ -32,6 +32,12 @@ lib-cov # Coverage directory used by tools like istanbul coverage +junit/ +coverage.xml +test-*.xml +.coverage.* +htmlcov/ +.coverage # nyc test coverage .nyc_output diff --git a/dependencies/develop.txt b/dependencies/develop.txt index 986d8ce634..64c2e0aa06 100644 --- a/dependencies/develop.txt +++ b/dependencies/develop.txt @@ -6,5 +6,7 @@ sphinx-rtd-theme sphinxcontrib-websupport nbsphinx pytest +pytest-cov +pytest-azurepipelines coverage ipython diff --git a/dependencies/recommended.txt b/dependencies/recommended.txt index 
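The IPv4 detection change in PATCH 1/8 above replaces eth0 interface scanning with a connected UDP socket. The same technique translates directly to the Python standard library; the following is a minimal sketch (the function name is illustrative, not part of the patch), assuming only stdlib socket:

    import socket

    def get_ipv4_address() -> str:
        # connect() on a UDP socket sends no packets; it only asks the kernel
        # to choose a route, which fixes the local source address.
        # 192.0.2.0 is in TEST-NET-1, reserved for documentation, matching
        # the target used in the TypeScript change above.
        sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
        try:
            sock.connect(('192.0.2.0', 1))
            return sock.getsockname()[0]
        finally:
            sock.close()

Like the TypeScript version, this returns the address of whichever interface routes outbound traffic, so it works on machines that have no eth0 device at all.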
8aaffd4423..db7080613a 100644 --- a/dependencies/recommended.txt +++ b/dependencies/recommended.txt @@ -2,6 +2,7 @@ -f https://download.pytorch.org/whl/torch_stable.html tensorflow +keras torch == 1.6.0+cpu ; sys_platform != "darwin" torch == 1.6.0 ; sys_platform == "darwin" torchvision == 0.7.0+cpu ; sys_platform != "darwin" diff --git a/nni/algorithms/__init__.py b/nni/algorithms/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/nni/algorithms/compression/__init__.py b/nni/algorithms/compression/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/nni/algorithms/compression/pytorch/__init__.py b/nni/algorithms/compression/pytorch/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/nni/algorithms/compression/pytorch/auto_compress/experiment.py b/nni/algorithms/compression/pytorch/auto_compress/experiment.py index 6859e60a12..07c5c4b3c8 100644 --- a/nni/algorithms/compression/pytorch/auto_compress/experiment.py +++ b/nni/algorithms/compression/pytorch/auto_compress/experiment.py @@ -5,8 +5,6 @@ from pathlib import Path, PurePath from typing import overload, Union, List -from numpy import tri - from nni.experiment import Experiment, ExperimentConfig from nni.algorithms.compression.pytorch.auto_compress.interface import AbstractAutoCompressionModule @@ -62,7 +60,8 @@ def __init__(self, auto_compress_module: AbstractAutoCompressionModule, config=N def start(self, port: int, debug: bool) -> None: trial_code_directory = str(PurePath(Path(self.config.trial_code_directory).absolute())) + '/' - assert self.module_file_path.startswith(trial_code_directory), 'The file path of the user-provided module should under trial_code_directory.' + assert self.module_file_path.startswith(trial_code_directory), \ + 'The file path of the user-provided module should under trial_code_directory.' relative_module_path = self.module_file_path.split(trial_code_directory)[1] # only support linux, need refactor? command = 'python3 -m nni.algorithms.compression.pytorch.auto_compress.trial_entry --module_file_name {} --module_class_name {}' diff --git a/nni/algorithms/compression/pytorch/auto_compress/utils.py b/nni/algorithms/compression/pytorch/auto_compress/utils.py index fe46db63e9..aab372b984 100644 --- a/nni/algorithms/compression/pytorch/auto_compress/utils.py +++ b/nni/algorithms/compression/pytorch/auto_compress/utils.py @@ -31,10 +31,11 @@ def _add_pruner_config(self, pruner_name: str, config_list: list, **algo_kwargs) pruner_name Supported pruner name: 'level', 'slim', 'l1', 'l2', 'fpgm', 'taylorfo', 'apoz', 'mean_activation'. config_list - Except 'op_types' and 'op_names', other config value can be written as `{'_type': ..., '_value': ...}`. + Except 'op_types' and 'op_names', other config value can be written as ``{'_type': ..., '_value': ...}``. **algo_kwargs The additional pruner parameters except 'model', 'config_list', 'optimizer', 'trainer', 'criterion'. - i.e., you can set `statistics_batch_num={'_type': 'choice', '_value': [1, 2, 3]}` in TaylorFOWeightFilterPruner or just `statistics_batch_num=1`. + i.e., you can set ``statistics_batch_num={'_type': 'choice', '_value': [1, 2, 3]}`` + in TaylorFOWeightFilterPruner or just ``statistics_batch_num=1``. 
""" sub_search_space = {'_name': pruner_name} for config in config_list: diff --git a/nni/algorithms/compression/pytorch/pruning/iterative_pruner.py b/nni/algorithms/compression/pytorch/pruning/iterative_pruner.py index d5cd5dfccb..82940ad737 100644 --- a/nni/algorithms/compression/pytorch/pruning/iterative_pruner.py +++ b/nni/algorithms/compression/pytorch/pruning/iterative_pruner.py @@ -84,7 +84,7 @@ def compress(self): self._trainer(self.bound_model, optimizer=self.optimizer, criterion=self._criterion, epoch=epoch) # NOTE: workaround for statistics_batch_num bigger than max batch number in one epoch, need refactor if hasattr(self.masker, 'statistics_batch_num') and hasattr(self, 'iterations'): - if self.iterations < self.masker.statistics_batch_num: + if self.iterations < self.masker.statistics_batch_num: # pylint: disable=access-member-before-definition self.iterations = self.masker.statistics_batch_num self.update_mask() self.bound_model.train(training) @@ -118,7 +118,8 @@ class AGPPruner(IterativePruner): choose from `['level', 'slim', 'l1', 'l2', 'fpgm', 'taylorfo', 'apoz', 'mean_activation']`, by default `level` """ - def __init__(self, model, config_list, optimizer, trainer, criterion, num_iterations=10, epochs_per_iteration=1, pruning_algorithm='level'): + def __init__(self, model, config_list, optimizer, trainer, criterion, + num_iterations=10, epochs_per_iteration=1, pruning_algorithm='level'): super().__init__(model, config_list, optimizer=optimizer, trainer=trainer, criterion=criterion, num_iterations=num_iterations, epochs_per_iteration=epochs_per_iteration) assert isinstance(optimizer, torch.optim.Optimizer), "AGP pruner is an iterative pruner, please pass optimizer of the model to it" diff --git a/nni/algorithms/compression/pytorch/pruning/sensitivity_pruner.py b/nni/algorithms/compression/pytorch/pruning/sensitivity_pruner.py index ed4d791abd..ea0a725004 100644 --- a/nni/algorithms/compression/pytorch/pruning/sensitivity_pruner.py +++ b/nni/algorithms/compression/pytorch/pruning/sensitivity_pruner.py @@ -10,9 +10,10 @@ from schema import And, Optional from nni.compression.pytorch.compressor import Pruner from nni.compression.pytorch.utils.config_validation import CompressorSchema -from .constants_pruner import PRUNER_DICT from nni.compression.pytorch.utils.sensitivity_analysis import SensitivityAnalysis +from .constants_pruner import PRUNER_DICT + MAX_PRUNE_RATIO_PER_ITER = 0.95 diff --git a/nni/algorithms/compression/pytorch/quantization/quantizers.py b/nni/algorithms/compression/pytorch/quantization/quantizers.py index dbd5e5b3c3..c1e815c4b6 100644 --- a/nni/algorithms/compression/pytorch/quantization/quantizers.py +++ b/nni/algorithms/compression/pytorch/quantization/quantizers.py @@ -245,7 +245,7 @@ def _dequantize(self, op, quantized_val): def quantize_weight(self, wrapper, **kwargs): config = wrapper.config module = wrapper.module - input = kwargs['input_tensor'] + input = kwargs['input_tensor'] # pylint: disable=redefined-builtin weight = copy.deepcopy(wrapper.module.old_weight.data) weight_bits = get_bits_length(config, 'weight') quant_start_step = config.get('quant_start_step', 0) @@ -304,7 +304,8 @@ def quantize_output(self, output, wrapper, **kwargs): module.ema_decay) module.tracked_max_activation = update_ema(module.tracked_max_activation, current_max, module.ema_decay) - module.scale, module.zero_point = update_quantization_param(output_bits, module.tracked_min_activation, module.tracked_max_activation) + module.scale, module.zero_point = 
update_quantization_param( + output_bits, module.tracked_min_activation, module.tracked_max_activation) out = self._quantize(output_bits, module, output) out = self._dequantize(module, out) return out diff --git a/nni/algorithms/feature_engineering/__init__.py b/nni/algorithms/feature_engineering/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/nni/algorithms/hpo/__init__.py b/nni/algorithms/hpo/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/nni/algorithms/hpo/dngo_tuner.py b/nni/algorithms/hpo/dngo_tuner.py index 276ec75168..0e41ff4ec9 100644 --- a/nni/algorithms/hpo/dngo_tuner.py +++ b/nni/algorithms/hpo/dngo_tuner.py @@ -2,11 +2,12 @@ import numpy as np import torch +from pybnn import DNGO +from torch.distributions import Normal + import nni.parameter_expressions as parameter_expressions from nni import ClassArgsValidator from nni.tuner import Tuner -from pybnn import DNGO -from torch.distributions import Normal _logger = logging.getLogger(__name__) diff --git a/nni/algorithms/hpo/hyperband_advisor.py b/nni/algorithms/hpo/hyperband_advisor.py index 10b9b643bc..2315269f3e 100644 --- a/nni/algorithms/hpo/hyperband_advisor.py +++ b/nni/algorithms/hpo/hyperband_advisor.py @@ -265,8 +265,10 @@ def validate_class_args(self, **kwargs): }).validate(kwargs) class Hyperband(MsgDispatcherBase): - """Hyperband inherit from MsgDispatcherBase rather than Tuner, because it integrates both tuner's functions and assessor's functions. - This is an implementation that could fully leverage available resources or follow the algorithm process, i.e., high parallelism or serial. + """ + Hyperband inherit from MsgDispatcherBase rather than Tuner, because it integrates both tuner's functions and assessor's functions. + This is an implementation that could fully leverage available resources or follow the algorithm process, + i.e., high parallelism or serial. A single execution of Hyperband takes a finite budget of (s_max + 1)B. Parameters @@ -346,7 +348,8 @@ def _get_one_trial_job(self): self.curr_hb += 1 _logger.debug('create a new bracket, self.curr_hb=%d, self.curr_s=%d', self.curr_hb, self.curr_s) self.curr_bracket_id = '{}-{}'.format(self.curr_hb, self.curr_s) - self.brackets[self.curr_bracket_id] = Bracket(self.curr_bracket_id, self.curr_s, self.s_max, self.eta, self.R, self.optimize_mode) + self.brackets[self.curr_bracket_id] = Bracket( + self.curr_bracket_id, self.curr_s, self.s_max, self.eta, self.R, self.optimize_mode) next_n, next_r = self.brackets[self.curr_bracket_id].get_n_r() _logger.debug('new bracket, next_n=%d, next_r=%d', next_n, next_r) assert self.searchspace_json is not None and self.random_state is not None diff --git a/nni/algorithms/hpo/metis_tuner/metis_tuner.py b/nni/algorithms/hpo/metis_tuner/metis_tuner.py index 1a0670f5e5..96d3070741 100644 --- a/nni/algorithms/hpo/metis_tuner/metis_tuner.py +++ b/nni/algorithms/hpo/metis_tuner/metis_tuner.py @@ -15,6 +15,8 @@ from schema import Schema, Optional from nni import ClassArgsValidator +from nni.tuner import Tuner +from nni.utils import OptimizeMode, extract_scalar_reward from . import lib_constraint_summation from . 
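The quantizer hunk above only re-wraps the call to update_quantization_param, but the pair it computes is worth spelling out. Assuming the helper follows the standard asymmetric-quantization formula (a sketch only; the real implementation in NNI's quantization utilities may differ in clamping details):

    def quantization_params(bits: int, rmin: float, rmax: float):
        # The representable range must include zero so that 0.0 quantizes exactly.
        rmin, rmax = min(rmin, 0.0), max(rmax, 0.0)
        qmin, qmax = 0, (1 << bits) - 1
        scale = (rmax - rmin) / (qmax - qmin) or 1.0  # guard rmin == rmax == 0
        zero_point = int(round(qmin - rmin / scale))
        return scale, max(qmin, min(zero_point, qmax))

Here tracked_min_activation and tracked_max_activation, the EMA-smoothed range maintained in quantize_output above, play the roles of rmin and rmax.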
import lib_data from .Regression_GMM import CreateModel as gmm_create_model @@ -23,8 +25,6 @@ from .Regression_GP import OutlierDetection as gp_outlier_detection from .Regression_GP import Prediction as gp_prediction from .Regression_GP import Selection as gp_selection -from nni.tuner import Tuner -from nni.utils import OptimizeMode, extract_scalar_reward logger = logging.getLogger("Metis_Tuner_AutoML") diff --git a/nni/algorithms/hpo/networkmorphism_tuner/networkmorphism_tuner.py b/nni/algorithms/hpo/networkmorphism_tuner/networkmorphism_tuner.py index 385028506d..a61cefa666 100644 --- a/nni/algorithms/hpo/networkmorphism_tuner/networkmorphism_tuner.py +++ b/nni/algorithms/hpo/networkmorphism_tuner/networkmorphism_tuner.py @@ -8,13 +8,13 @@ import logging import os from schema import Optional, Schema +from nni import ClassArgsValidator from nni.tuner import Tuner from nni.utils import OptimizeMode, extract_scalar_reward from .bayesian import BayesianOptimizer from .nn import CnnGenerator, MlpGenerator from .utils import Constant from .graph import graph_to_json, json_to_graph -from nni import ClassArgsValidator logger = logging.getLogger("NetworkMorphism_AutoML") diff --git a/nni/algorithms/nas/__init__.py b/nni/algorithms/nas/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/nni/algorithms/nas/pytorch/__init__.py b/nni/algorithms/nas/pytorch/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/nni/algorithms/nas/pytorch/cream/trainer.py b/nni/algorithms/nas/pytorch/cream/trainer.py index 50830ce64b..b44f404669 100644 --- a/nni/algorithms/nas/pytorch/cream/trainer.py +++ b/nni/algorithms/nas/pytorch/cream/trainer.py @@ -1,11 +1,10 @@ # Copyright (c) Microsoft Corporation. # Licensed under the MIT license. 
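The Hyperband docstring a few hunks above states that a single execution takes a finite budget of (s_max + 1)B. A worked example of that arithmetic, using the paper's conventions s_max = floor(log_eta(R)) and B = (s_max + 1)R; the values R = 81, eta = 3 are illustrative, not taken from the patch:

    # Count how many times the budget can be divided by eta before exceeding R.
    R, eta = 81, 3          # max resource per trial, reduction factor
    s_max = 0
    while eta ** (s_max + 1) <= R:
        s_max += 1          # -> 4, i.e. brackets s = 4, 3, 2, 1, 0
    B = (s_max + 1) * R     # budget per bracket: 405
    total = (s_max + 1) * B # one full Hyperband run: 2025
    print(s_max, B, total)

The integer loop avoids floating-point edge cases that floor(math.log(R, eta)) can hit when R is an exact power of eta.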
-import os -import torch import logging - from copy import deepcopy + +import torch from nni.nas.pytorch.trainer import Trainer from nni.nas.pytorch.utils import AverageMeterGroup @@ -209,8 +208,8 @@ def _simulate_sgd_update(self, w, g, optimizer): return g * optimizer.param_groups[-1]['lr'] + w # split training images into several slices - def _get_minibatch_input(self, input): - slice = self.slices + def _get_minibatch_input(self, input): # pylint: disable=redefined-builtin + slice = self.slices # pylint: disable=redefined-builtin x = deepcopy(input[:slice].clone().detach()) return x @@ -259,8 +258,8 @@ def _cross_entropy_loss_with_soft_target(self, pred, soft_target): return torch.mean(torch.sum(- soft_target * logsoftmax(pred), 1)) # forward validation data - def _forward_validation(self, input, target): - slice = self.slices + def _forward_validation(self, input, target): # pylint: disable=redefined-builtin + slice = self.slices # pylint: disable=redefined-builtin x = input[slice:slice * 2].clone() self._replace_mutator_cand(self.current_student_arch) @@ -281,7 +280,7 @@ def _replace_mutator_cand(self, cand): self.mutator._cache = cand # update meta matching networks - def _run_update(self, input, target, batch_idx): + def _run_update(self, input, target, batch_idx): # pylint: disable=redefined-builtin if self._isUpdateMeta(batch_idx): x = self._get_minibatch_input(input) diff --git a/nni/algorithms/nas/pytorch/fbnet/trainer.py b/nni/algorithms/nas/pytorch/fbnet/trainer.py index 7588ecf5e5..1eaababef2 100644 --- a/nni/algorithms/nas/pytorch/fbnet/trainer.py +++ b/nni/algorithms/nas/pytorch/fbnet/trainer.py @@ -128,7 +128,7 @@ def _layer_choice_sample(self): layer_id = 0 for i, stage_name in enumerate(stages): ops_names = [op for op in self.lookup_table.lut_ops[stage_name]] - for j in range(stage_lnum[i]): + for _ in range(stage_lnum[i]): searched_op = ops_names[choice_ids[layer_id]] choice_names.append(searched_op) layer_id += 1 diff --git a/nni/algorithms/nas/pytorch/fbnet/utils.py b/nni/algorithms/nas/pytorch/fbnet/utils.py index 332f2e2c62..925b612a50 100644 --- a/nni/algorithms/nas/pytorch/fbnet/utils.py +++ b/nni/algorithms/nas/pytorch/fbnet/utils.py @@ -3,7 +3,6 @@ from __future__ import absolute_import, division, print_function -import gc # noqa: F401 import os import timeit import torch @@ -159,7 +158,7 @@ def supernet_sample(model, state_dict, sampled_arch=[], lookup_table=None): layer_id = 0 for i, stage in enumerate(stages): ops_names = [op_name for op_name in lookup_table.lut_ops[stage]] - for j in range(stage_lnum[i]): + for _ in range(stage_lnum[i]): searched_op = sampled_arch[layer_id] op_i = ops_names.index(searched_op) replace.append( diff --git a/nni/algorithms/nas/tensorflow/__init__.py b/nni/algorithms/nas/tensorflow/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/nni/algorithms/nas/tensorflow/classic_nas/mutator.py b/nni/algorithms/nas/tensorflow/classic_nas/mutator.py index fad4987fed..cb089c49b8 100644 --- a/nni/algorithms/nas/tensorflow/classic_nas/mutator.py +++ b/nni/algorithms/nas/tensorflow/classic_nas/mutator.py @@ -1,6 +1,8 @@ # Copyright (c) Microsoft Corporation. # Licensed under the MIT license. 
+# pylint: skip-file + import json import logging import os diff --git a/nni/algorithms/nas/tensorflow/enas/mutator.py b/nni/algorithms/nas/tensorflow/enas/mutator.py index de43195fa2..313c81cc9b 100644 --- a/nni/algorithms/nas/tensorflow/enas/mutator.py +++ b/nni/algorithms/nas/tensorflow/enas/mutator.py @@ -1,6 +1,8 @@ # Copyright (c) Microsoft Corporation. # Licensed under the MIT license. +# pylint: skip-file + import tensorflow as tf from tensorflow.keras.layers import Dense, Embedding, LSTMCell, RNN from tensorflow.keras.losses import SparseCategoricalCrossentropy, Reduction diff --git a/nni/common/__init__.py b/nni/common/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/nni/experiment/launcher.py b/nni/experiment/launcher.py index 7ee9e43242..7c6ed5a490 100644 --- a/nni/experiment/launcher.py +++ b/nni/experiment/launcher.py @@ -12,7 +12,7 @@ import colorama -import nni_node # pylint: disable=import-error +import nni_node # pylint: disable=wrong-import-order, import-error import nni.runtime.protocol from .config import ExperimentConfig diff --git a/nni/runtime/__init__.py b/nni/runtime/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/nni/runtime/log.py b/nni/runtime/log.py index 0fc97c666f..276a0f3b4d 100644 --- a/nni/runtime/log.py +++ b/nni/runtime/log.py @@ -1,17 +1,15 @@ +import logging +import sys from datetime import datetime from io import TextIOBase -import logging from logging import FileHandler, Formatter, Handler, StreamHandler from pathlib import Path -import sys -import time from typing import Optional import colorama from .env_vars import dispatcher_env_vars, trial_env_vars - handlers = {} log_format = '[%(asctime)s] %(levelname)s (%(name)s/%(threadName)s) %(message)s' diff --git a/nni/runtime/msg_dispatcher.py b/nni/runtime/msg_dispatcher.py index 20b9597f9e..7276a099ca 100644 --- a/nni/runtime/msg_dispatcher.py +++ b/nni/runtime/msg_dispatcher.py @@ -6,11 +6,12 @@ import json_tricks from nni import NoMoreTrialError -from .protocol import CommandType, send -from .msg_dispatcher_base import MsgDispatcherBase from nni.assessor import AssessResult + from .common import multi_thread_enabled, multi_phase_enabled from .env_vars import dispatcher_env_vars +from .msg_dispatcher_base import MsgDispatcherBase +from .protocol import CommandType, send from ..utils import MetricType, to_json _logger = logging.getLogger(__name__) diff --git a/nni/runtime/platform/local.py b/nni/runtime/platform/local.py index b1f26462e2..d24caf52a5 100644 --- a/nni/runtime/platform/local.py +++ b/nni/runtime/platform/local.py @@ -7,8 +7,8 @@ import time import subprocess -from ..env_vars import trial_env_vars from nni.utils import to_json +from ..env_vars import trial_env_vars _sysdir = trial_env_vars.NNI_SYS_DIR if not os.path.exists(os.path.join(_sysdir, '.nni')): diff --git a/nni/runtime/platform/standalone.py b/nni/runtime/platform/standalone.py index 70caad4e66..8cfaaa68b4 100644 --- a/nni/runtime/platform/standalone.py +++ b/nni/runtime/platform/standalone.py @@ -1,9 +1,10 @@ # Copyright (c) Microsoft Corporation. # Licensed under the MIT license. 
-import colorama import logging import warnings + +import colorama import json_tricks __all__ = [ diff --git a/nni/tools/__init__.py b/nni/tools/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/nni/tools/nnictl/launcher.py b/nni/tools/nnictl/launcher.py index 9144bc0267..907aacb70a 100644 --- a/nni/tools/nnictl/launcher.py +++ b/nni/tools/nnictl/launcher.py @@ -14,7 +14,7 @@ from nni.experiment.config import ExperimentConfig, convert from nni.tools.annotation import expand_annotations, generate_search_space from nni.tools.package_utils import get_builtin_module_class_name -import nni_node # pylint: disable=import-error +import nni_node # pylint: disable=import-error, wrong-import-order from .launcher_utils import validate_all_content from .rest_utils import rest_put, rest_post, check_rest_server, check_response from .url_utils import cluster_metadata_url, experiment_url, get_local_urls, set_prefix_url diff --git a/nni/tools/nnictl/nnictl_utils.py b/nni/tools/nnictl/nnictl_utils.py index 7ef0fe7dd7..1f23fd451b 100644 --- a/nni/tools/nnictl/nnictl_utils.py +++ b/nni/tools/nnictl/nnictl_utils.py @@ -13,7 +13,7 @@ from datetime import datetime, timezone from subprocess import Popen from nni.tools.annotation import expand_annotations -import nni_node # pylint: disable=import-error +import nni_node # pylint: disable=wrong-import-order, import-error from .rest_utils import rest_get, rest_delete, check_rest_server_quick, check_response from .url_utils import trial_jobs_url, experiment_url, trial_job_id_url, export_data_url, metric_data_url from .config_utils import Config, Experiments @@ -76,13 +76,16 @@ def check_experiment_id(args, update=True): print_error('There are multiple experiments, please set the experiment id...') experiment_information = "" for key in running_experiment_list: - experiment_information += EXPERIMENT_DETAIL_FORMAT % (key, - experiments_dict[key].get('experimentName', 'N/A'), - experiments_dict[key]['status'], - experiments_dict[key].get('port', 'N/A'), - experiments_dict[key].get('platform'), - time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(experiments_dict[key]['startTime'] / 1000)) if isinstance(experiments_dict[key]['startTime'], int) else experiments_dict[key]['startTime'], - time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(experiments_dict[key]['endTime'] / 1000)) if isinstance(experiments_dict[key]['endTime'], int) else experiments_dict[key]['endTime']) + experiment_information += EXPERIMENT_DETAIL_FORMAT % ( + key, + experiments_dict[key].get('experimentName', 'N/A'), + experiments_dict[key]['status'], + experiments_dict[key].get('port', 'N/A'), + experiments_dict[key].get('platform'), + time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(experiments_dict[key]['startTime'] / 1000)) \ + if isinstance(experiments_dict[key]['startTime'], int) else experiments_dict[key]['startTime'], + time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(experiments_dict[key]['endTime'] / 1000)) \ + if isinstance(experiments_dict[key]['endTime'], int) else experiments_dict[key]['endTime']) print(EXPERIMENT_INFORMATION_FORMAT % experiment_information) exit(1) elif not running_experiment_list: @@ -136,13 +139,16 @@ def parse_ids(args): print_error('There are multiple experiments, please set the experiment id...') experiment_information = "" for key in running_experiment_list: - experiment_information += EXPERIMENT_DETAIL_FORMAT % (key, - experiments_dict[key].get('experimentName', 'N/A'), - experiments_dict[key]['status'], - experiments_dict[key].get('port', 
'N/A'), - experiments_dict[key].get('platform'), - time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(experiments_dict[key]['startTime'] / 1000)) if isinstance(experiments_dict[key]['startTime'], int) else experiments_dict[key]['startTime'], - time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(experiments_dict[key]['endTime'] / 1000)) if isinstance(experiments_dict[key]['endTime'], int) else experiments_dict[key]['endTime']) + experiment_information += EXPERIMENT_DETAIL_FORMAT % ( + key, + experiments_dict[key].get('experimentName', 'N/A'), + experiments_dict[key]['status'], + experiments_dict[key].get('port', 'N/A'), + experiments_dict[key].get('platform'), + time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(experiments_dict[key]['startTime'] / 1000)) \ + if isinstance(experiments_dict[key]['startTime'], int) else experiments_dict[key]['startTime'], + time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(experiments_dict[key]['endTime'] / 1000)) \ + if isinstance(experiments_dict[key]['endTime'], int) else experiments_dict[key]['endTime']) print(EXPERIMENT_INFORMATION_FORMAT % experiment_information) exit(1) else: @@ -615,13 +621,16 @@ def experiment_list(args): print_warning('There is no experiment running...\nYou can use \'nnictl experiment list --all\' to list all experiments.') experiment_information = "" for key in experiment_id_list: - experiment_information += EXPERIMENT_DETAIL_FORMAT % (key, - experiments_dict[key].get('experimentName', 'N/A'), - experiments_dict[key]['status'], - experiments_dict[key].get('port', 'N/A'), - experiments_dict[key].get('platform'), - time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(experiments_dict[key]['startTime'] / 1000)) if isinstance(experiments_dict[key]['startTime'], int) else experiments_dict[key]['startTime'], - time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(experiments_dict[key]['endTime'] / 1000)) if isinstance(experiments_dict[key]['endTime'], int) else experiments_dict[key]['endTime']) + experiment_information += EXPERIMENT_DETAIL_FORMAT % ( + key, + experiments_dict[key].get('experimentName', 'N/A'), + experiments_dict[key]['status'], + experiments_dict[key].get('port', 'N/A'), + experiments_dict[key].get('platform'), + time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(experiments_dict[key]['startTime'] / 1000)) \ + if isinstance(experiments_dict[key]['startTime'], int) else experiments_dict[key]['startTime'], + time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(experiments_dict[key]['endTime'] / 1000)) \ + if isinstance(experiments_dict[key]['endTime'], int) else experiments_dict[key]['endTime']) print(EXPERIMENT_INFORMATION_FORMAT % experiment_information) return experiment_id_list @@ -656,9 +665,12 @@ def show_experiment_info(): print_warning('There is no experiment running...') return for key in experiment_id_list: - print(EXPERIMENT_MONITOR_INFO % (key, experiments_dict[key]['status'], experiments_dict[key]['port'], \ - experiments_dict[key].get('platform'), time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(experiments_dict[key]['startTime'] / 1000)) if isinstance(experiments_dict[key]['startTime'], int) else experiments_dict[key]['startTime'], \ - get_time_interval(experiments_dict[key]['startTime'], experiments_dict[key]['endTime']))) + print(EXPERIMENT_MONITOR_INFO % ( + key, experiments_dict[key]['status'], experiments_dict[key]['port'], + experiments_dict[key].get('platform'), + time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(experiments_dict[key]['startTime'] / 1000)) \ + if isinstance(experiments_dict[key]['startTime'], int) 
else experiments_dict[key]['startTime'], + get_time_interval(experiments_dict[key]['startTime'], experiments_dict[key]['endTime']))) print(TRIAL_MONITOR_HEAD) running, response = check_rest_server_quick(experiments_dict[key]['port']) if running: diff --git a/pipelines/fast-test.yml b/pipelines/fast-test.yml index fdf004e31c..408e68a311 100644 --- a/pipelines/fast-test.yml +++ b/pipelines/fast-test.yml @@ -182,12 +182,13 @@ stages: - script: | set -e cd test - python -m pytest ut --ignore=ut/sdk/test_pruners.py \ + python -m pytest ut --cov-config=.coveragerc \ + --ignore=ut/sdk/test_pruners.py \ --ignore=ut/sdk/test_compressor_tf.py \ --ignore=ut/sdk/test_compressor_torch.py - python -m pytest ut/sdk/test_pruners.py - python -m pytest ut/sdk/test_compressor_tf.py - python -m pytest ut/sdk/test_compressor_torch.py + python -m pytest ut/sdk/test_pruners.py --cov-config=.coveragerc --cov-append + python -m pytest ut/sdk/test_compressor_tf.py --cov-config=.coveragerc --cov-append + python -m pytest ut/sdk/test_compressor_torch.py --cov-config=.coveragerc --cov-append displayName: Python unit test - script: | @@ -198,6 +199,19 @@ stages: CI=true yarn test displayName: TypeScript unit test + - task: PublishTestResults@2 + condition: succeededOrFailed() + inputs: + testResultsFiles: '$(System.DefaultWorkingDirectory)/**/test-*.xml' + testRunTitle: 'Publish test results for Python $(python.version)' + displayName: Publish test results + + - task: PublishCodeCoverageResults@1 + inputs: + codeCoverageTool: Cobertura + summaryFileLocation: '$(System.DefaultWorkingDirectory)/**/*coverage.xml' + displayName: Publish code coverage results + - script: | cd test python nni_test/nnitest/run_tests.py --config config/pr_tests.yml diff --git a/test/.coveragerc b/test/.coveragerc index 1565dbf27b..2d76ac1916 100644 --- a/test/.coveragerc +++ b/test/.coveragerc @@ -1,9 +1,6 @@ # .coveragerc to control coverage.py [run] branch = True -parallel = True -data_file = ${COVERAGE_DATA_FILE} -source = nni, nni.tools.cmd, nni.tools.trial_tool concurrency = multiprocessing @@ -26,6 +23,3 @@ exclude_lines = if __name__ == .__main__.: ignore_errors = True - -[html] -directory = ${COVERAGE_HTML_DIR} diff --git a/test/pytest.ini b/test/pytest.ini new file mode 100644 index 0000000000..5daf6aa426 --- /dev/null +++ b/test/pytest.ini @@ -0,0 +1,2 @@ +[pytest] +addopts = --cov=nni --cov-config=.coveragerc --junitxml=junit/test-results.xml --cov-report=xml --cov-report=html --cov-config=.coveragerc diff --git a/ts/nni_manager/.gitignore b/ts/nni_manager/.gitignore index 9f0035fd3a..a415c379ba 100644 --- a/ts/nni_manager/.gitignore +++ b/ts/nni_manager/.gitignore @@ -1,5 +1,8 @@ -#Build result +# Build result dist/ -#node modules +# node modules node_modules/ + +# test files +.experiment.test diff --git a/ts/nni_manager/package.json b/ts/nni_manager/package.json index c8b0eae512..0ad8f5f78b 100644 --- a/ts/nni_manager/package.json +++ b/ts/nni_manager/package.json @@ -4,7 +4,7 @@ "main": "index.js", "scripts": { "build": "tsc", - "test": "nyc mocha -r ts-node/register -t 15000 --recursive **/*.test.ts --exclude node_modules/**/**/*.test.ts --colors", + "test": "nyc --reporter=cobertura --reporter=html --reporter=text --report-dir=./htmlcov mocha -r ts-node/register -t 15000 --recursive **/*.test.ts --exclude node_modules/**/**/*.test.ts --colors", "start": "node dist/main.js", "watch": "tsc --watch", "eslint": "npx eslint ./ --ext .ts" From af5551f929b1906345abd5866959c12c83c7bdbd Mon Sep 17 00:00:00 2001 From: SparkSnail Date: 
Fri, 25 Jun 2021 16:41:14 +0800 Subject: [PATCH 3/8] Fix trainingService initialize error in View mode (#3872) --- ts/nni_manager/core/nnimanager.ts | 11 ++++++----- 1 file changed, 6 insertions(+), 5 deletions(-) diff --git a/ts/nni_manager/core/nnimanager.ts b/ts/nni_manager/core/nnimanager.ts index ee5aa0784e..959371d09a 100644 --- a/ts/nni_manager/core/nnimanager.ts +++ b/ts/nni_manager/core/nnimanager.ts @@ -204,11 +204,6 @@ class NNIManager implements Manager { const experimentId: string = getExperimentId(); this.log.info(`Resuming experiment: ${experimentId}`); this.experimentProfile = await this.dataStore.getExperimentProfile(experimentId); - this.readonly = readonly; - if (readonly) { - this.setStatus('VIEWED'); - return Promise.resolve(); - } const config: ExperimentConfig = this.experimentProfile.params; this.config = config; @@ -217,6 +212,12 @@ class NNIManager implements Manager { this.trainingService = await this.initTrainingService(config); } + this.readonly = readonly; + if (readonly) { + this.setStatus('VIEWED'); + return; + } + this.log.info('Setup tuner...'); const dispatcherCommand: string = getMsgDispatcherCommand(config); this.log.debug(`dispatcher command: ${dispatcherCommand}`); From 749a463aeeab98d08a428f1a7c2494982da23515 Mon Sep 17 00:00:00 2001 From: liuzhe-lz <40699903+liuzhe-lz@users.noreply.github.com> Date: Mon, 28 Jun 2021 11:00:48 +0800 Subject: [PATCH 4/8] Support prefix for webui static files (#3874) --- ts/webui/config/webpack.config.js | 11 ++++------- .../src/components/stateless-component/NNItabs.tsx | 4 +++- ts/webui/src/index.tsx | 2 +- 3 files changed, 8 insertions(+), 9 deletions(-) diff --git a/ts/webui/config/webpack.config.js b/ts/webui/config/webpack.config.js index 849de5a104..f04868625b 100644 --- a/ts/webui/config/webpack.config.js +++ b/ts/webui/config/webpack.config.js @@ -54,19 +54,16 @@ module.exports = function(webpackEnv) { // Webpack uses `publicPath` to determine where the app is being served from. // It requires a trailing slash, or the file assets will get an incorrect path. // In development, we always serve from the root. This makes config easier. - const publicPath = isEnvProduction - ? paths.servedPath - : isEnvDevelopment && '/'; + //const publicPath = isEnvProduction ? paths.servedPath : isEnvDevelopment && '/'; + const publicPath = './'; // Some apps do not use client-side routing with pushState. // For these, "homepage" can be set to "." to enable relative asset paths. - const shouldUseRelativeAssetPaths = publicPath === './'; + const shouldUseRelativeAssetPaths = true; // `publicUrl` is just like `publicPath`, but we will provide it to our app // as %PUBLIC_URL% in `index.html` and `process.env.PUBLIC_URL` in JavaScript. // Omit trailing slash as %PUBLIC_URL%/xyz looks better than %PUBLIC_URL%xyz. - const publicUrl = isEnvProduction - ? publicPath.slice(0, -1) - : isEnvDevelopment && ''; + const publicUrl = '.'; // Get environment variables to inject into our app. 
const env = getClientEnvironment(publicUrl); diff --git a/ts/webui/src/components/stateless-component/NNItabs.tsx b/ts/webui/src/components/stateless-component/NNItabs.tsx index 277e7c2792..f7ca44fe3e 100644 --- a/ts/webui/src/components/stateless-component/NNItabs.tsx +++ b/ts/webui/src/components/stateless-component/NNItabs.tsx @@ -1,6 +1,8 @@ import * as React from 'react'; import { NavLink } from 'react-router-dom'; +import { getPrefix } from '../../static/function'; + const OVERVIEWTABS = ( Overview @@ -15,7 +17,7 @@ const DETAILTABS = ( const NNILOGO = ( - NNI logo + NNI logo ); diff --git a/ts/webui/src/index.tsx b/ts/webui/src/index.tsx index 42397eb740..c0ed77370a 100644 --- a/ts/webui/src/index.tsx +++ b/ts/webui/src/index.tsx @@ -17,7 +17,7 @@ ReactDOM.render( - + } > From 32fdd32bfe99f89c1ab039223dd2f2ac998cffa5 Mon Sep 17 00:00:00 2001 From: liuzhe-lz <40699903+liuzhe-lz@users.noreply.github.com> Date: Wed, 30 Jun 2021 15:49:56 +0800 Subject: [PATCH 5/8] Add simple HPO search space validation (#3877) --- nni/algorithms/hpo/batch_tuner.py | 2 + nni/algorithms/hpo/dngo_tuner.py | 2 + nni/algorithms/hpo/gp_tuner/gp_tuner.py | 2 + nni/algorithms/hpo/gridsearch_tuner.py | 2 + nni/algorithms/hpo/hyperband_advisor.py | 2 + nni/algorithms/hpo/hyperopt_tuner.py | 2 + nni/algorithms/hpo/metis_tuner/metis_tuner.py | 3 + nni/algorithms/hpo/smac_tuner/smac_tuner.py | 2 + nni/common/hpo_utils.py | 75 +++++++++++++++++++ test/ut/sdk/test_hpo_utils.py | 52 +++++++++++++ 10 files changed, 144 insertions(+) create mode 100644 nni/common/hpo_utils.py create mode 100644 test/ut/sdk/test_hpo_utils.py diff --git a/nni/algorithms/hpo/batch_tuner.py b/nni/algorithms/hpo/batch_tuner.py index 4f73fce945..b2df2617cb 100644 --- a/nni/algorithms/hpo/batch_tuner.py +++ b/nni/algorithms/hpo/batch_tuner.py @@ -9,6 +9,7 @@ class BatchTuner import logging import nni +from nni.common.hpo_utils import validate_search_space from nni.tuner import Tuner TYPE = '_type' @@ -75,6 +76,7 @@ def update_search_space(self, search_space): ---------- search_space : dict """ + validate_search_space(search_space, ['choice']) self._values = self.is_valid(search_space) def generate_parameters(self, parameter_id, **kwargs): diff --git a/nni/algorithms/hpo/dngo_tuner.py b/nni/algorithms/hpo/dngo_tuner.py index 0e41ff4ec9..58ceecfcd9 100644 --- a/nni/algorithms/hpo/dngo_tuner.py +++ b/nni/algorithms/hpo/dngo_tuner.py @@ -7,6 +7,7 @@ import nni.parameter_expressions as parameter_expressions from nni import ClassArgsValidator +from nni.common.hpo_utils import validate_search_space from nni.tuner import Tuner _logger = logging.getLogger(__name__) @@ -86,6 +87,7 @@ def generate_parameters(self, parameter_id, **kwargs): return new_x def update_search_space(self, search_space): + validate_search_space(search_space, ['choice', 'randint', 'uniform', 'quniform', 'loguniform', 'qloguniform']) self.searchspace_json = search_space self.random_state = np.random.RandomState() diff --git a/nni/algorithms/hpo/gp_tuner/gp_tuner.py b/nni/algorithms/hpo/gp_tuner/gp_tuner.py index c4e6e9a89c..6beaf2c3a9 100644 --- a/nni/algorithms/hpo/gp_tuner/gp_tuner.py +++ b/nni/algorithms/hpo/gp_tuner/gp_tuner.py @@ -16,6 +16,7 @@ from sklearn.gaussian_process import GaussianProcessRegressor from nni import ClassArgsValidator +from nni.common.hpo_utils import validate_search_space from nni.tuner import Tuner from nni.utils import OptimizeMode, extract_scalar_reward @@ -103,6 +104,7 @@ def update_search_space(self, search_space): Override of the abstract method in 
:class:`~nni.tuner.Tuner`. """ + validate_search_space(search_space, ['choice', 'randint', 'uniform', 'quniform', 'loguniform', 'qloguniform']) self._space = TargetSpace(search_space, self._random_state) def generate_parameters(self, parameter_id, **kwargs): diff --git a/nni/algorithms/hpo/gridsearch_tuner.py b/nni/algorithms/hpo/gridsearch_tuner.py index 4787b3f0ee..be8f143b18 100644 --- a/nni/algorithms/hpo/gridsearch_tuner.py +++ b/nni/algorithms/hpo/gridsearch_tuner.py @@ -11,6 +11,7 @@ class GridSearchTuner import numpy as np import nni +from nni.common.hpo_utils import validate_search_space from nni.tuner import Tuner from nni.utils import convert_dict2tuple @@ -144,6 +145,7 @@ def update_search_space(self, search_space): search_space : dict The format could be referred to search space spec (https://nni.readthedocs.io/en/latest/Tutorial/SearchSpaceSpec.html). """ + validate_search_space(search_space, ['choice', 'randint', 'quniform']) self.expanded_search_space = self._json2parameter(search_space) def generate_parameters(self, parameter_id, **kwargs): diff --git a/nni/algorithms/hpo/hyperband_advisor.py b/nni/algorithms/hpo/hyperband_advisor.py index 2315269f3e..7d07de1f76 100644 --- a/nni/algorithms/hpo/hyperband_advisor.py +++ b/nni/algorithms/hpo/hyperband_advisor.py @@ -15,6 +15,7 @@ from schema import Schema, Optional from nni import ClassArgsValidator +from nni.common.hpo_utils import validate_search_space from nni.runtime.common import multi_phase_enabled from nni.runtime.msg_dispatcher_base import MsgDispatcherBase from nni.runtime.protocol import CommandType, send @@ -379,6 +380,7 @@ def _get_one_trial_job(self): def handle_update_search_space(self, data): """data: JSON object, which is search space """ + validate_search_space(data) self.searchspace_json = data self.random_state = np.random.RandomState() diff --git a/nni/algorithms/hpo/hyperopt_tuner.py b/nni/algorithms/hpo/hyperopt_tuner.py index fbe9cda13f..07aaa1246f 100644 --- a/nni/algorithms/hpo/hyperopt_tuner.py +++ b/nni/algorithms/hpo/hyperopt_tuner.py @@ -12,6 +12,7 @@ import numpy as np from schema import Optional, Schema from nni import ClassArgsValidator +from nni.common.hpo_utils import validate_search_space from nni.tuner import Tuner from nni.utils import NodeType, OptimizeMode, extract_scalar_reward, split_index @@ -246,6 +247,7 @@ def update_search_space(self, search_space): ---------- search_space : dict """ + validate_search_space(search_space) self.json = search_space search_space_instance = json2space(self.json) diff --git a/nni/algorithms/hpo/metis_tuner/metis_tuner.py b/nni/algorithms/hpo/metis_tuner/metis_tuner.py index 96d3070741..0575f463e6 100644 --- a/nni/algorithms/hpo/metis_tuner/metis_tuner.py +++ b/nni/algorithms/hpo/metis_tuner/metis_tuner.py @@ -16,6 +16,7 @@ from nni import ClassArgsValidator from nni.tuner import Tuner +from nni.common.hpo_utils import validate_search_space from nni.utils import OptimizeMode, extract_scalar_reward from . import lib_constraint_summation from . 
import lib_data @@ -152,6 +153,8 @@ def update_search_space(self, search_space): ---------- search_space : dict """ + validate_search_space(search_space, ['choice', 'randint', 'uniform', 'quniform']) + self.x_bounds = [[] for i in range(len(search_space))] self.x_types = [NONE_TYPE for i in range(len(search_space))] diff --git a/nni/algorithms/hpo/smac_tuner/smac_tuner.py b/nni/algorithms/hpo/smac_tuner/smac_tuner.py index fdaed49d8f..68f1d07ff5 100644 --- a/nni/algorithms/hpo/smac_tuner/smac_tuner.py +++ b/nni/algorithms/hpo/smac_tuner/smac_tuner.py @@ -21,6 +21,7 @@ import nni from nni import ClassArgsValidator +from nni.common.hpo_utils import validate_search_space from nni.tuner import Tuner from nni.utils import OptimizeMode, extract_scalar_reward @@ -143,6 +144,7 @@ def update_search_space(self, search_space): The format could be referred to search space spec (https://nni.readthedocs.io/en/latest/Tutorial/SearchSpaceSpec.html). """ self.logger.info('update search space in SMAC.') + validate_search_space(search_space, ['choice', 'randint', 'uniform', 'quniform', 'loguniform']) if not self.update_ss_done: self.categorical_dict = generate_scenario(search_space) if self.categorical_dict is None: diff --git a/nni/common/hpo_utils.py b/nni/common/hpo_utils.py new file mode 100644 index 0000000000..f99e97eff1 --- /dev/null +++ b/nni/common/hpo_utils.py @@ -0,0 +1,75 @@ +import logging +from typing import Any, List, Optional + +common_search_space_types = [ + 'choice', + 'randint', + 'uniform', + 'quniform', + 'loguniform', + 'qloguniform', + 'normal', + 'qnormal', + 'lognormal', + 'qlognormal', +] + +def validate_search_space( + search_space: Any, + support_types: Optional[List[str]] = None, + raise_exception: bool = False # for now, in case false positive + ) -> bool: + + if not raise_exception: + try: + validate_search_space(search_space, support_types, True) + return True + except ValueError as e: + logging.getLogger(__name__).error(e.args[0]) + return False + + if support_types is None: + support_types = common_search_space_types + + if not isinstance(search_space, dict): + raise ValueError(f'search space is a {type(search_space).__name__}, expect a dict : {repr(search_space)}') + + for name, spec in search_space.items(): + if not isinstance(spec, dict): + raise ValueError(f'search space "{name}" is a {type(spec).__name__}, expect a dict : {repr(spec)}') + if '_type' not in spec or '_value' not in spec: + raise ValueError(f'search space "{name}" does not have "_type" or "_value" : {spec}') + type_ = spec['_type'] + if type_ not in support_types: + raise ValueError(f'search space "{name}" has unsupported type "{type_}" : {spec}') + args = spec['_value'] + if not isinstance(args, list): + raise ValueError(f'search space "{name}"\'s value is not a list : {spec}') + + if type_ == 'choice': + continue + + if type_.startswith('q'): + if len(args) != 3: + raise ValueError(f'search space "{name}" ({type_}) must have 3 values : {spec}') + else: + if len(args) != 2: + raise ValueError(f'search space "{name}" ({type_}) must have 2 values : {spec}') + + if type_ == 'randint': + if not all(isinstance(arg, int) for arg in args): + raise ValueError(f'search space "{name}" ({type_}) must have int values : {spec}') + else: + if not all(isinstance(arg, (float, int)) for arg in args): + raise ValueError(f'search space "{name}" ({type_}) must have float values : {spec}') + + if 'normal' not in type_: + if args[0] >= args[1]: + raise ValueError(f'search space "{name}" ({type_}) must have high > low : 
{spec}') + if 'log' in type_ and args[0] <= 0: + raise ValueError(f'search space "{name}" ({type_}) must have low > 0 : {spec}') + else: + if args[1] <= 0: + raise ValueError(f'search space "{name}" ({type_}) must have sigma > 0 : {spec}') + + return True diff --git a/test/ut/sdk/test_hpo_utils.py b/test/ut/sdk/test_hpo_utils.py new file mode 100644 index 0000000000..e0f6d12294 --- /dev/null +++ b/test/ut/sdk/test_hpo_utils.py @@ -0,0 +1,52 @@ +from nni.common.hpo_utils import validate_search_space + +good = { + 'choice': { '_type': 'choice', '_value': ['a', 'b'] }, + 'randint': { '_type': 'randint', '_value': [1, 10] }, + 'uniform': { '_type': 'uniform', '_value': [0, 1.0] }, + 'quniform': { '_type': 'quniform', '_value': [1, 10, 0.1] }, + 'loguniform': { '_type': 'loguniform', '_value': [0.001, 0.1] }, + 'qloguniform': { '_type': 'qloguniform', '_value': [0.001, 0.1, 0.001] }, + 'normal': { '_type': 'normal', '_value': [0, 0.1] }, + 'qnormal': { '_type': 'qnormal', '_value': [0.5, 0.1, 0.1] }, + 'lognormal': { '_type': 'lognormal', '_value': [0.0, 1] }, + 'qlognormal': { '_type': 'qlognormal', '_value': [-1, 1, 0.1] }, +} +good_partial = { + 'choice': good['choice'], + 'randint': good['randint'], +} + +bad_type = 'x' +bad_spec_type = { 'x': [1, 2, 3] } +bad_fields = { 'x': { 'type': 'choice', 'value': ['a', 'b'] } } +bad_type_name = { 'x': { '_type': 'choic', '_value': ['a'] } } +bad_value = { 'x': { '_type': 'choice', '_value': 'ab' } } +bad_2_args = { 'x': { '_type': 'randint', '_value': [1, 2, 3] } } +bad_3_args = { 'x': { '_type': 'quniform', '_value': [0] } } +bad_int_args = { 'x': { '_type': 'randint', '_value': [1.0, 2.0] } } +bad_float_args = { 'x': { '_type': 'uniform', '_value': ['0.1', '0.2'] } } +bad_low_high = { 'x': { '_type': 'quniform', '_value': [2, 1, 0.1] } } +bad_log = { 'x': { '_type': 'loguniform', '_value': [0, 1] } } +bad_sigma = { 'x': { '_type': 'normal', '_value': [0, 0] } } + +def test_hpo_utils(): + assert validate_search_space(good, raise_exception=False) + assert not validate_search_space(bad_type, raise_exception=False) + assert not validate_search_space(bad_spec_type, raise_exception=False) + assert not validate_search_space(bad_fields, raise_exception=False) + assert not validate_search_space(bad_type_name, raise_exception=False) + assert not validate_search_space(bad_value, raise_exception=False) + assert not validate_search_space(bad_2_args, raise_exception=False) + assert not validate_search_space(bad_3_args, raise_exception=False) + assert not validate_search_space(bad_int_args, raise_exception=False) + assert not validate_search_space(bad_float_args, raise_exception=False) + assert not validate_search_space(bad_low_high, raise_exception=False) + assert not validate_search_space(bad_log, raise_exception=False) + assert not validate_search_space(bad_sigma, raise_exception=False) + + assert validate_search_space(good_partial, ['choice', 'randint'], False) + assert not validate_search_space(good, ['choice', 'randint'], False) + +if __name__ == '__main__': + test_hpo_utils() From 8f01c779baea536f7dcd86370ef4b2265c5e660b Mon Sep 17 00:00:00 2001 From: Ni Hao Date: Wed, 30 Jun 2021 16:19:08 +0800 Subject: [PATCH 6/8] Add shared storage integration test (#3455) --- nni/experiment/config/common.py | 1 + pipelines/integration-test-remote-l2l.yml | 13 +++--- test/config/integration_tests.yml | 29 ++++++++++++- .../config_sharedstorage_remote_azureblob.yml | 43 +++++++++++++++++++ .../config_sharedstorage_remote_nfs.yml | 40 +++++++++++++++++ 
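PATCH 5/8 above threads nni.common.hpo_utils.validate_search_space through the tuners; a short usage sketch consolidating what the new unit tests exercise (the search-space keys here are illustrative):

    from nni.common.hpo_utils import validate_search_space

    space = {
        'lr': {'_type': 'loguniform', '_value': [1e-4, 1e-1]},
        'layers': {'_type': 'choice', '_value': [2, 3, 5]},
    }
    # By default errors are logged and False is returned instead of raising.
    assert validate_search_space(space)
    # Tuners that support only a subset of types pass an explicit whitelist,
    # as the batch, DNGO, GP, grid-search, metis and SMAC tuners now do:
    assert validate_search_space(space, ['choice', 'loguniform'])
    assert not validate_search_space(space, ['choice'])  # loguniform rejected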
.../config_sharedstorage_search_space.json | 7 +++ .../config_sharedstorage_trial.py | 24 +++++++++++ test/config/training_service.yml | 3 ++ test/nni_test/nnitest/generate_ts_config.py | 7 ++- test/nni_test/nnitest/run_tests.py | 32 ++++++++++++-- test/nni_test/nnitest/validators.py | 14 ++++++ test/vso_tools/start_docker.py | 3 +- .../azureblobStorageService.ts | 6 +-- 13 files changed, 208 insertions(+), 14 deletions(-) create mode 100644 test/config/sharedstorage_test/config_sharedstorage_remote_azureblob.yml create mode 100644 test/config/sharedstorage_test/config_sharedstorage_remote_nfs.yml create mode 100644 test/config/sharedstorage_test/config_sharedstorage_search_space.json create mode 100644 test/config/sharedstorage_test/config_sharedstorage_trial.py diff --git a/nni/experiment/config/common.py b/nni/experiment/config/common.py index c7cd64a7da..7bcad43c5f 100644 --- a/nni/experiment/config/common.py +++ b/nni/experiment/config/common.py @@ -46,6 +46,7 @@ class CustomAlgorithmConfig(_AlgorithmConfig): class TrainingServiceConfig(ConfigBase): platform: str +@dataclass(init=False) class SharedStorageConfig(ConfigBase): storage_type: str local_mount_point: str diff --git a/pipelines/integration-test-remote-l2l.yml b/pipelines/integration-test-remote-l2l.yml index baf9f479d5..fb417bc1b4 100644 --- a/pipelines/integration-test-remote-l2l.yml +++ b/pipelines/integration-test-remote-l2l.yml @@ -87,27 +87,30 @@ jobs: cd test python3 nni_test/nnitest/generate_ts_config.py \ --ts remote \ - --remote_reuse false \ + --remote_reuse true \ --remote_user nni \ --remote_host $(worker_ip) \ --remote_port $(docker_port) \ --remote_pwd $(password_in_docker) \ - --nni_manager_ip $(manager_ip) + --nni_manager_ip $(manager_ip) \ + --azurestoragetoken $(azureblob_token_test) \ + --nfs_server $(NFS_IP) python3 nni_test/nnitest/run_tests.py --config config/integration_tests.yml --ts remote - displayName: Integration test + displayName: Integration test (reuse mode) - script: | cd test python3 nni_test/nnitest/generate_ts_config.py \ --ts remote \ - --remote_reuse true \ + --remote_reuse false \ --remote_user nni \ --remote_host $(worker_ip) \ --remote_port $(docker_port) \ --remote_pwd $(password_in_docker) \ --nni_manager_ip $(manager_ip) python3 nni_test/nnitest/run_tests.py --config config/integration_tests.yml --ts remote - displayName: Integration test (reuse mode) + displayName: Integration test + - task: SSH@0 inputs: diff --git a/test/config/integration_tests.yml b/test/config/integration_tests.yml index 0476f6d922..61d2abe27c 100644 --- a/test/config/integration_tests.yml +++ b/test/config/integration_tests.yml @@ -34,6 +34,34 @@ testCases: # check status of experiment before calling validator experimentStatusCheck: True +- name: shared-storage-remote-azureblob + configFile: test/config/sharedstorage_test/config_sharedstorage_remote_azureblob.yml + config: + sharedStorage: + localMountPoint: /tmp/nnimount/testlocalrootpath + remoteMountPoint: /tmp/nnimount/testremoterootpath + storageAccountName: nennistorage + storageAccountKey: $(azureblob_token_test) + containerName: sharedstorage + validator: + class: FileExistValidator + kwargs: + rootpath: /tmp/nnimount/testlocalrootpath + +# TODO: Enable this case after nfs server is ready +#- name: shared-storage-remote-nfs +# configFile: test/config/sharedstorage_test/config_sharedstorage_remote_nfs.yml +# config: +# sharedStorage: +# localMountPoint: /tmp/nnimount/testlocalrootpath +# remoteMountPoint: /tmp/nnimount/testremoterootpath +# nfsServer: 
$(NFS_IP) +# exportedDirectory: /home/nni/mnt/ +# validator: +# class: FileExistValidator +# kwargs: +# rootpath: /tmp/nnimount/testlocalrootpath + - name: sklearn-regression configFile: test/config/examples/sklearn-regression.yml @@ -227,4 +255,3 @@ testCases: ######################################################################### - name: customized-tuners-demotuner configFile: test/config/customized_tuners/demotuner-sklearn-classification.yml - diff --git a/test/config/sharedstorage_test/config_sharedstorage_remote_azureblob.yml b/test/config/sharedstorage_test/config_sharedstorage_remote_azureblob.yml new file mode 100644 index 0000000000..20043172fa --- /dev/null +++ b/test/config/sharedstorage_test/config_sharedstorage_remote_azureblob.yml @@ -0,0 +1,43 @@ +authorName: default +experimentName: example_mnist +trialConcurrency: 1 +maxExecDuration: 1h +maxTrialNum: 1 +trainingServicePlatform: remote +searchSpacePath: config_sharedstorage_search_space.json +#choice: true, false +useAnnotation: false +nniManagerIp: 127.0.0.1 +tuner: + #choice: TPE, Random, Anneal, Evolution, BatchTuner, MetisTuner, GPTuner + #SMAC (SMAC should be installed through nnictl) + builtinTunerName: TPE + classArgs: + #choice: maximize, minimize + optimize_mode: maximize +trial: + command: python3 config_sharedstorage_trial.py + codeDir: . + gpuNum: 0 +sharedStorage: + storageType: AzureBlob + localMountPoint: ${your/local/mount/point} + remoteMountPoint: ${your/remote/mount/point} + storageAccountName: ${replace_to_your_storageAccountName} + storageAccountKey: ${replace_to_your_storageAccountKey} + # If you did not set storageAccountKey, you need use `az login` with Azure CLI at first and set resourceGroupName. + # resourceGroupName: ${replace_to_your_resourceGroupName} + containerName: ${replace_to_your_containerName} + # usermount means you have already mount this storage on localMountPoint + # nnimount means nni will try to mount this storage on localMountPoint + # nomount means storage will not mount in local machine, will support partial storages in the future + localMounted: nnimount +#machineList can be empty if the platform is local +machineList: + - ip: 10.1.1.1 + username: bob + passwd: bob123 + #port can be skip if using default ssh port 22 + #port: 22 +remoteConfig: + reuse: true \ No newline at end of file diff --git a/test/config/sharedstorage_test/config_sharedstorage_remote_nfs.yml b/test/config/sharedstorage_test/config_sharedstorage_remote_nfs.yml new file mode 100644 index 0000000000..7bf458b438 --- /dev/null +++ b/test/config/sharedstorage_test/config_sharedstorage_remote_nfs.yml @@ -0,0 +1,40 @@ +authorName: default +experimentName: example_mnist +trialConcurrency: 1 +maxExecDuration: 1h +maxTrialNum: 1 +trainingServicePlatform: remote +searchSpacePath: config_sharedstorage_search_space.json +#choice: true, false +useAnnotation: false +nniManagerIp: 127.0.0.1 +tuner: + #choice: TPE, Random, Anneal, Evolution, BatchTuner, MetisTuner, GPTuner + #SMAC (SMAC should be installed through nnictl) + builtinTunerName: TPE + classArgs: + #choice: maximize, minimize + optimize_mode: maximize +trial: + command: python3 config_sharedstorage_trial.py + codeDir: . 
diff --git a/test/config/sharedstorage_test/config_sharedstorage_remote_nfs.yml b/test/config/sharedstorage_test/config_sharedstorage_remote_nfs.yml
new file mode 100644
index 0000000000..7bf458b438
--- /dev/null
+++ b/test/config/sharedstorage_test/config_sharedstorage_remote_nfs.yml
@@ -0,0 +1,40 @@
+authorName: default
+experimentName: example_mnist
+trialConcurrency: 1
+maxExecDuration: 1h
+maxTrialNum: 1
+trainingServicePlatform: remote
+searchSpacePath: config_sharedstorage_search_space.json
+#choice: true, false
+useAnnotation: false
+nniManagerIp: 127.0.0.1
+tuner:
+  #choice: TPE, Random, Anneal, Evolution, BatchTuner, MetisTuner, GPTuner
+  #SMAC (SMAC should be installed through nnictl)
+  builtinTunerName: TPE
+  classArgs:
+    #choice: maximize, minimize
+    optimize_mode: maximize
+trial:
+  command: python3 config_sharedstorage_trial.py
+  codeDir: .
+  gpuNum: 0
+sharedStorage:
+  storageType: NFS
+  localMountPoint: ${your/local/mount/point}
+  remoteMountPoint: ${your/remote/mount/point}
+  nfsServer: ${nfs-server-ip}
+  exportedDirectory: ${nfs/exported/directory}
+  # usermount means you have already mounted this storage on localMountPoint
+  # nnimount means nni will try to mount this storage on localMountPoint
+  # nomount means the storage will not be mounted on the local machine; partial storage support will be added in the future
+  localMounted: nnimount
+#machineList can be empty if the platform is local
+machineList:
+  - ip: 10.1.1.1
+    username: bob
+    passwd: bob123
+    #port can be skipped if using default ssh port 22
+    #port: 22
+remoteConfig:
+  reuse: true
\ No newline at end of file
diff --git a/test/config/sharedstorage_test/config_sharedstorage_search_space.json b/test/config/sharedstorage_test/config_sharedstorage_search_space.json
new file mode 100644
index 0000000000..dd05405e27
--- /dev/null
+++ b/test/config/sharedstorage_test/config_sharedstorage_search_space.json
@@ -0,0 +1,7 @@
+{
+    "dropout_rate":{"_type":"uniform","_value":[0.5, 0.9]},
+    "conv_size":{"_type":"choice","_value":[2,3,5,7]},
+    "hidden_size":{"_type":"choice","_value":[124, 512, 1024]},
+    "batch_size": {"_type":"choice", "_value": [16, 32]},
+    "learning_rate":{"_type":"choice","_value":[0.0001, 0.001, 0.01, 0.1]}
+}
diff --git a/test/config/sharedstorage_test/config_sharedstorage_trial.py b/test/config/sharedstorage_test/config_sharedstorage_trial.py
new file mode 100644
index 0000000000..adfc46cba4
--- /dev/null
+++ b/test/config/sharedstorage_test/config_sharedstorage_trial.py
@@ -0,0 +1,24 @@
+"""
+A minimal trial for the shared storage test: it appends the tuner parameters to a checking file under NNI_OUTPUT_DIR and reports a constant result.
+
+Adapted from the official pytorch mnist example:
+https://github.com/pytorch/examples/blob/master/mnist/main.py
+"""
+import os
+import logging
+import nni
+logger = logging.getLogger('mnist_AutoML')
+if __name__ == '__main__':
+    try:
+        logger.debug(os.environ.get('NNI_OUTPUT_DIR'))
+        filename = os.path.join(os.environ.get('NNI_OUTPUT_DIR'), 'checkingfile.txt')
+        f = open(filename, "a")
+
+        tuner_params = nni.get_next_parameter()
+        f.write(str(tuner_params))
+        nni.report_final_result(1)
+
+        f.close()
+    except Exception as exception:
+        logger.exception(exception)
+        raise
diff --git a/test/config/training_service.yml b/test/config/training_service.yml
index 94d2288367..07e809414f 100644
--- a/test/config/training_service.yml
+++ b/test/config/training_service.yml
@@ -87,6 +87,9 @@ remote:
   port:
   username:
   trainingServicePlatform: remote
+  sharedStorage:
+    storageAccountKey:
+    nfsServer:
 hybrid:
   maxExecDuration: 15m
   nniManagerIp:
diff --git a/test/nni_test/nnitest/generate_ts_config.py b/test/nni_test/nnitest/generate_ts_config.py
index d406131661..b1208f3856 100644
--- a/test/nni_test/nnitest/generate_ts_config.py
+++ b/test/nni_test/nnitest/generate_ts_config.py
@@ -74,6 +74,10 @@ def update_training_service_config(args):
             config[args.ts]['machineList'][0]['passwd'] = args.remote_pwd
         if args.remote_reuse is not None:
             config[args.ts]['remoteConfig']['reuse'] = args.remote_reuse.lower() == 'true'
+        if args.azurestoragetoken is not None:
+            config[args.ts]['sharedStorage']['storageAccountKey'] = args.azurestoragetoken
+        if args.nfs_server is not None:
+            config[args.ts]['sharedStorage']['nfsServer'] = args.nfs_server
     elif args.ts == 'adl':
         if args.nni_docker_image is not None:
             config[args.ts]['trial']['image'] = args.nni_docker_image
@@ -118,6 +122,8 @@ def update_training_service_config(args):
    parser.add_argument("--config_version", type=str, choices=['v1', 'v2'], default='v1')
parser.add_argument("--config_version", type=str, choices=['v1', 'v2'], default='v1') parser.add_argument("--nni_docker_image", type=str) parser.add_argument("--nni_manager_ip", type=str) + parser.add_argument("--azurestoragetoken", type=str) + parser.add_argument("--nfs_server", type=str) # args for PAI parser.add_argument("--pai_user", type=str) parser.add_argument("--pai_pwd", type=str) @@ -131,7 +137,6 @@ def update_training_service_config(args): parser.add_argument("--nni_manager_nfs_mount_path", type=str) parser.add_argument("--container_nfs_mount_path", type=str) # args for kubeflow and frameworkController - parser.add_argument("--nfs_server", type=str) parser.add_argument("--nfs_path", type=str) parser.add_argument("--keyvault_vaultname", type=str) parser.add_argument("--keyvault_name", type=str) diff --git a/test/nni_test/nnitest/run_tests.py b/test/nni_test/nnitest/run_tests.py index f87416a5fd..ac422207d9 100644 --- a/test/nni_test/nnitest/run_tests.py +++ b/test/nni_test/nnitest/run_tests.py @@ -23,7 +23,7 @@ it_variables = {} -def update_training_service_config(config, training_service, config_file_path): +def update_training_service_config(config, training_service, config_file_path, nni_source_dir): it_ts_config = get_yml_content(os.path.join('config', 'training_service.yml')) # hack for kubeflow trial config @@ -38,7 +38,7 @@ def update_training_service_config(config, training_service, config_file_path): config['trial'].pop('command') if 'gpuNum' in config['trial']: config['trial'].pop('gpuNum') - + if training_service == 'adl': # hack for adl trial config, codeDir in adl mode refers to path in container containerCodeDir = config['trial']['codeDir'] @@ -52,6 +52,18 @@ def update_training_service_config(config, training_service, config_file_path): containerCodeDir = config['trial']['codeDir'].replace('../../../', '/') it_ts_config[training_service]['trial']['codeDir'] = containerCodeDir it_ts_config[training_service]['trial']['command'] = 'cd {0} && {1}'.format(containerCodeDir, config['trial']['command']) + + if training_service == 'remote': + testcase_config = get_yml_content(nni_source_dir + config_file_path) + sharedStorage = testcase_config.get('sharedStorage') + if sharedStorage is None: + it_ts_config[training_service].pop('sharedStorage') + elif str(sharedStorage.get('storageType')).lower() == 'nfs': + it_ts_config[training_service].get('sharedStorage').pop('storageAccountKey') + elif str(sharedStorage.get('storageType')).lower() == 'azureblob': + it_ts_config[training_service].get('sharedStorage').pop('nfsServer') + else: + it_ts_config[training_service].pop('sharedStorage') if training_service == 'hybrid': it_ts_config = get_yml_content(os.path.join('config', 'training_service_v2.yml')) @@ -75,7 +87,7 @@ def prepare_config_file(test_case_config, it_config, args): # apply training service config # user's gpuNum, logCollection config is overwritten by the config in training_service.yml # the hack for kubeflow should be applied at last step - update_training_service_config(test_yml_config, args.ts, test_case_config['configFile']) + update_training_service_config(test_yml_config, args.ts, test_case_config['configFile'], args.nni_source_dir) # generate temporary config yml file to launch experiment new_config_file = config_path + '.tmp' @@ -238,6 +250,15 @@ def match_training_service(test_case_config, cur_training_service): return True return False +def match_remoteConfig(test_case_config, nni_source_dir): + trainingservice_config = get_yml_content(os.path.join('config', 
@@ -238,6 +250,15 @@ def match_training_service(test_case_config, cur_training_service):
             return True
     return False
 
+def match_remoteConfig(test_case_config, nni_source_dir):
+    trainingservice_config = get_yml_content(os.path.join('config', 'training_service.yml'))
+    trainingservice_config_reuse_value = str(trainingservice_config['remote']['remoteConfig']['reuse']).lower()
+    testcase_config = get_yml_content(nni_source_dir + test_case_config['configFile'])
+    if testcase_config.get('remoteConfig') is not None:
+        if testcase_config['remoteConfig'].get('reuse') is not None:
+            return str(testcase_config['remoteConfig']['reuse']).lower() == trainingservice_config_reuse_value
+    return True
+
 def run(args):
     it_config = get_yml_content(args.config)
@@ -264,8 +285,13 @@ def run(args):
             print('skipped {}, training service {} not match [{}]'.format(
                 name, args.ts, test_case_config['trainingService']))
             continue
+
         # remote mode need more time to cleanup
         if args.ts == 'remote' or args.ts == 'hybrid':
+            if args.ts == 'remote':
+                if not match_remoteConfig(test_case_config, args.nni_source_dir):
+                    print('skipped {}, remoteConfig does not match.'.format(name))
+                    continue
             wait_for_port_available(8080, 240)
         else:
             wait_for_port_available(8080, 60)
diff --git a/test/nni_test/nnitest/validators.py b/test/nni_test/nnitest/validators.py
index 3352899d07..3fdbf94eaa 100644
--- a/test/nni_test/nnitest/validators.py
+++ b/test/nni_test/nnitest/validators.py
@@ -97,3 +97,17 @@ def __call__(self, rest_endpoint, experiment_dir, nni_source_dir, **kwargs):
         print(exp.get_job_statistics())
         print(exp.get_experiment_status())
         print(exp.list_trial_jobs())
+
+class FileExistValidator(ITValidator):
+    def __call__(self, rest_endpoint, experiment_dir, nni_source_dir, **kwargs):
+        print(rest_endpoint)
+        exp_id = osp.split(experiment_dir)[-1]
+        rootpath = kwargs.get('rootpath')
+
+        metrics = requests.get(METRICS_URL).json()
+        for metric in metrics:
+            trial_id = metric['trialJobId']
+            checkpath = osp.join(rootpath, 'nni', exp_id, 'trials', trial_id, 'nnioutput', 'checkingfile.txt')
+            print('Checking shared storage log exists on trial', trial_id)
+            assert osp.exists(checkpath)
+
diff --git a/test/vso_tools/start_docker.py b/test/vso_tools/start_docker.py
index 3057162f71..817e229285 100644
--- a/test/vso_tools/start_docker.py
+++ b/test/vso_tools/start_docker.py
@@ -25,8 +25,9 @@
 password = sys.argv[3]
 
 run_command(f'docker build --build-arg NNI_RELEASE={version} -t nnidev/nni-nightly .')
-run_command(f'docker run -d -t -p {port}:22 --name {container} nnidev/nni-nightly')
+run_command(f'docker run --privileged -d -t -p {port}:22 --name {container} nnidev/nni-nightly')
 run_command(f'docker exec {container} useradd --create-home --password {password} nni')
 run_command(['docker', 'exec', container, 'bash', '-c', f'echo "nni:{password}" | chpasswd'])
+run_command(['docker', 'exec', container, 'bash', '-c', 'echo "nni ALL=(ALL:ALL) NOPASSWD:ALL" >> /etc/sudoers'])
 run_command(f'docker exec {container} service ssh start')
 set_variable('docker_port', port)
diff --git a/ts/nni_manager/training_service/reusable/shared_storages/azureblobStorageService.ts b/ts/nni_manager/training_service/reusable/shared_storages/azureblobStorageService.ts
index 7a0185f861..1335cddeb5 100644
--- a/ts/nni_manager/training_service/reusable/shared_storages/azureblobStorageService.ts
+++ b/ts/nni_manager/training_service/reusable/shared_storages/azureblobStorageService.ts
@@ -34,13 +34,13 @@ fi
 id=$(lsb_release -i | cut -c16- | sed s/[[:space:]]//g)
 version=$(lsb_release -r | cut -c9- | sed s/[[:space:]]//g)
 
-if [ $id = "Ubuntu" ]
+if [ "$id" = "Ubuntu" ]
 then
     wget https://packages.microsoft.com/config/ubuntu/$version/packages-microsoft-prod.deb
-    sudo dpkg -i packages-microsoft-prod.deb
+    sudo DEBIAN_FRONTEND=noninteractive dpkg -i packages-microsoft-prod.deb
     sudo apt-get update
     sudo apt-get install -y blobfuse fuse
-elif [ $id = "CentOS" ] || [ $id = "RHEL" ]
+elif [ "$id" = "CentOS" ] || [ "$id" = "RHEL" ]
 then
     sudo rpm -Uvh https://packages.microsoft.com/config/rhel/$(echo $version | cut -c1)/packages-microsoft-prod.rpm
     sudo yum install -y blobfuse fuse
From 7f9642741cfb60cd7bc988896c0c781790badf16 Mon Sep 17 00:00:00 2001
From: Yuge Zhang
Date: Wed, 30 Jun 2021 16:22:10 +0800
Subject: [PATCH 7/8] Disable not-callable in pylint (#3887)

---
 .../hpo/networkmorphism_tuner/graph_transformer.py | 10 +++++-----
 1 file changed, 5 insertions(+), 5 deletions(-)

diff --git a/nni/algorithms/hpo/networkmorphism_tuner/graph_transformer.py b/nni/algorithms/hpo/networkmorphism_tuner/graph_transformer.py
index b03112d63b..28fda7acb9 100644
--- a/nni/algorithms/hpo/networkmorphism_tuner/graph_transformer.py
+++ b/nni/algorithms/hpo/networkmorphism_tuner/graph_transformer.py
@@ -96,21 +96,21 @@ def create_new_layer(layer, n_dim):
         new_layer = StubDense(input_shape[0], input_shape[0])
 
     elif layer_class == get_dropout_class(n_dim):
-        new_layer = layer_class(Constant.DENSE_DROPOUT_RATE)
+        new_layer = layer_class(Constant.DENSE_DROPOUT_RATE)  # pylint: disable=not-callable
 
     elif layer_class == get_conv_class(n_dim):
-        new_layer = layer_class(
+        new_layer = layer_class(  # pylint: disable=not-callable
             input_shape[-1], input_shape[-1], sample((1, 3, 5), 1)[0], stride=1
         )
 
     elif layer_class == get_batch_norm_class(n_dim):
-        new_layer = layer_class(input_shape[-1])
+        new_layer = layer_class(input_shape[-1])  # pylint: disable=not-callable
 
     elif layer_class == get_pooling_class(n_dim):
-        new_layer = layer_class(sample((1, 3, 5), 1)[0])
+        new_layer = layer_class(sample((1, 3, 5), 1)[0])  # pylint: disable=not-callable
 
     else:
-        new_layer = layer_class()
+        new_layer = layer_class()  # pylint: disable=not-callable
 
     return new_layer

From e50270ccd02d7231de2fc033ec0715fdebe9fdf0 Mon Sep 17 00:00:00 2001
From: liuzhe-lz <40699903+liuzhe-lz@users.noreply.github.com>
Date: Tue, 6 Jul 2021 13:26:40 +0800
Subject: [PATCH 8/8] fix alerts (#3888)

Co-authored-by: liuzhe
---
 ts/nni_manager/package.json |  4 +-
 ts/nni_manager/yarn.lock    | 38 +++++--------------
 ts/webui/package.json       |  6 ++-
 ts/webui/yarn.lock          | 75 ++++++++-----------------------------
 4 files changed, 32 insertions(+), 91 deletions(-)

diff --git a/ts/nni_manager/package.json b/ts/nni_manager/package.json
index 0ad8f5f78b..8bf1e9488d 100644
--- a/ts/nni_manager/package.json
+++ b/ts/nni_manager/package.json
@@ -72,7 +72,9 @@
     "npm": ">=7.16.0",
     "y18n": ">=5.0.8",
     "yargs-parser": ">=20.2.7",
-    "joi": ">=17.4.0"
+    "joi": ">=17.4.0",
+    "node-forge": ">=0.10.0",
+    "glob-parent": ">=6.0.0"
   },
   "engines": {
     "node": "^16.3.0"
diff --git a/ts/nni_manager/yarn.lock b/ts/nni_manager/yarn.lock
index b805155705..c0578e53f2 100644
--- a/ts/nni_manager/yarn.lock
+++ b/ts/nni_manager/yarn.lock
@@ -2455,18 +2455,10 @@ getpass@^0.1.1:
   dependencies:
     assert-plus "^1.0.0"
 
-glob-parent@^3.0.0:
-  version "3.1.0"
-  resolved "https://registry.yarnpkg.com/glob-parent/-/glob-parent-3.1.0.tgz#9e6af6299d8d3bd2bd40430832bd113df906c5ae"
-  integrity sha1-nmr2KZ2NO9K9QEMIMr0RPfkGxa4=
-  dependencies:
-    is-glob "^3.1.0"
-    path-dirname "^1.0.0"
-
-glob-parent@^5.1.0, glob-parent@^5.1.2, glob-parent@~5.1.0:
-  version "5.1.2"
-  resolved "https://registry.yarnpkg.com/glob-parent/-/glob-parent-5.1.2.tgz#869832c58034fe68a4093c17dc15e8340d8401c4"
-  integrity sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==
+glob-parent@>=6.0.0, glob-parent@^3.0.0, glob-parent@^5.1.0, glob-parent@^5.1.2, glob-parent@~5.1.0:
+  version "6.0.0"
+  resolved "https://registry.yarnpkg.com/glob-parent/-/glob-parent-6.0.0.tgz#f851b59b388e788f3a44d63fab50382b2859c33c"
+  integrity sha512-Hdd4287VEJcZXUwv1l8a+vXC1GjOQqXe+VS30w/ypihpcnu9M1n3xeYeJu5CBpeEQj2nAab2xxz28GuA3vp4Ww==
   dependencies:
     is-glob "^4.0.1"
@@ -2924,7 +2916,7 @@ is-extendable@^0.1.1:
   resolved "https://registry.yarnpkg.com/is-extendable/-/is-extendable-0.1.1.tgz#62b110e289a471418e3ec36a617d472e301dfc89"
   integrity sha1-YrEQ4omkcUGOPsNqYX1HLjAd/Ik=
 
-is-extglob@^2.1.0, is-extglob@^2.1.1:
+is-extglob@^2.1.1:
   version "2.1.1"
   resolved "https://registry.yarnpkg.com/is-extglob/-/is-extglob-2.1.1.tgz#a88c02535791f02ed37c76a1b9ea9773c833f8c2"
   integrity sha1-qIwCU1eR8C7TfHahueqXc8gz+MI=
@@ -2946,13 +2938,6 @@ is-fullwidth-code-point@^3.0.0:
   resolved "https://registry.yarnpkg.com/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz#f116f8064fe90b3f7844a38997c0b75051269f1d"
   integrity sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==
 
-is-glob@^3.1.0:
-  version "3.1.0"
-  resolved "https://registry.yarnpkg.com/is-glob/-/is-glob-3.1.0.tgz#7ba5ae24217804ac70707b96922567486cc3e84a"
-  integrity sha1-e6WuJCF4BKxwcHuWkiVnSGzD6Eo=
-  dependencies:
-    is-extglob "^2.1.0"
-
 is-glob@^4.0.0, is-glob@^4.0.1, is-glob@~4.0.1:
   version "4.0.1"
   resolved "https://registry.yarnpkg.com/is-glob/-/is-glob-4.0.1.tgz#7567dbe9f2f5e2467bc77ab83c4a29482407a5dc"
@@ -3907,10 +3892,10 @@ node-addon-api@^3.0.0:
   resolved "https://registry.yarnpkg.com/node-addon-api/-/node-addon-api-3.2.1.tgz#81325e0a2117789c0128dab65e7e38f07ceba161"
   integrity sha512-mmcei9JghVNDYydghQmeDX8KoAm0FAiYyIcUt/N4nhyAipB17pllZQDOJD2fotxABnt4Mdz+dKTO7eftLg4d0A==
 
-node-forge@^0.8.5:
-  version "0.8.5"
-  resolved "https://registry.yarnpkg.com/node-forge/-/node-forge-0.8.5.tgz#57906f07614dc72762c84cef442f427c0e1b86ee"
-  integrity sha512-vFMQIWt+J/7FLNyKouZ9TazT74PRV3wgv9UT4cRjC8BffxFbKXkgIWR42URCPSnHm/QDz6BOlb2Q0U4+VQT67Q==
+node-forge@>=0.10.0, node-forge@^0.8.5:
+  version "0.10.0"
+  resolved "https://registry.yarnpkg.com/node-forge/-/node-forge-0.10.0.tgz#32dea2afb3e9926f02ee5ce8794902691a676bf3"
+  integrity sha512-PPmu8eEeG9saEUvI97fm4OYxXVB6bFvyNTyiUOBichBpFG8A1Ljw3bY62+5oOjDEMHRnd0Y7HQ+x7uzxOzC6JA==
 
 node-gyp@3.x:
   version "3.8.0"
@@ -4577,11 +4562,6 @@ parseurl@~1.3.3:
   resolved "https://registry.yarnpkg.com/parseurl/-/parseurl-1.3.3.tgz#9da19e7bee8d12dff0513ed5b76957793bc2e8d4"
   integrity sha512-CiyeOxFT/JZyN5m0z9PfXw4SCBJ6Sygz1Dpl0wqjlhDEGGBP1GnsUVEL0p63hoG1fcj3fHynXi9NYO4nWOL+qQ==
 
-path-dirname@^1.0.0:
-  version "1.0.2"
-  resolved "https://registry.yarnpkg.com/path-dirname/-/path-dirname-1.0.2.tgz#cc33d24d525e099a5388c0336c6e32b9160609e0"
-  integrity sha1-zDPSTVJeCZpTiMAzbG4yuRYGCeA=
-
 path-exists@^3.0.0:
   version "3.0.0"
   resolved "https://registry.yarnpkg.com/path-exists/-/path-exists-3.0.0.tgz#ce0ebeaa5f78cb18925ea7d810d7b59b010fd515"
diff --git a/ts/webui/package.json b/ts/webui/package.json
index 56114b0f2f..72dc50d83a 100644
--- a/ts/webui/package.json
+++ b/ts/webui/package.json
@@ -115,7 +115,11 @@
     "yargs": ">=17.0.1",
     "acorn": ">=8.4.0",
     "y18n": ">=5.0.8",
-    "serialize-javascript": ">=5.0.1"
+    "serialize-javascript": ">=5.0.1",
+    "css-what": ">=5.0.1",
+    "browserslist": ">=4.16.6",
+    "trim-newlines": ">=4.0.2",
+    "glob-parent": ">=6.0.0"
   },
   "jest": {
     "verbose": true
diff --git a/ts/webui/yarn.lock b/ts/webui/yarn.lock
index 81ee32521f..eaff62ddb9 100644
--- a/ts/webui/yarn.lock
+++ b/ts/webui/yarn.lock
@@ -3456,17 +3456,7 @@ browser-process-hrtime@^1.0.0:
   resolved "https://registry.yarnpkg.com/browser-process-hrtime/-/browser-process-hrtime-1.0.0.tgz#3c9b4b7d782c8121e56f10106d84c0d0ffc94626"
   integrity sha512-9o5UecI3GhkpM6DrXr69PblIuWxPKk9Y0jHBRhdocZ2y7YECBFCsHm79Pr3OyR2AvjhDkabFJaDJMYRazHgsow==
 
-browserslist@4.14.2:
-  version "4.14.2"
-  resolved "https://registry.yarnpkg.com/browserslist/-/browserslist-4.14.2.tgz#1b3cec458a1ba87588cc5e9be62f19b6d48813ce"
-  integrity sha512-HI4lPveGKUR0x2StIz+2FXfDk9SfVMrxn6PLh1JeGUwcuoDkdKZebWiyLRJ68iIPDpMI4JLVDf7S7XzslgWOhw==
-  dependencies:
-    caniuse-lite "^1.0.30001125"
-    electron-to-chromium "^1.3.564"
-    escalade "^3.0.2"
-    node-releases "^1.1.61"
-
-browserslist@^4.0.0, browserslist@^4.12.0, browserslist@^4.14.5, browserslist@^4.16.0, browserslist@^4.16.6, browserslist@^4.6.4:
+browserslist@4.14.2, browserslist@>=4.16.6, browserslist@^4.0.0, browserslist@^4.12.0, browserslist@^4.14.5, browserslist@^4.16.0, browserslist@^4.16.6, browserslist@^4.6.4:
   version "4.16.6"
   resolved "https://registry.yarnpkg.com/browserslist/-/browserslist-4.16.6.tgz#d7901277a5a88e554ed305b183ec9b0c08f66fa2"
   integrity sha512-Wspk/PqO+4W9qp5iUTJsa1B/QrYn1keNCcEP5OvP7WBwT4KaDly0uONYmC6Xa3Z5IqnUgS0KcgLYu1l74x0ZXQ==
@@ -3625,7 +3615,7 @@ caniuse-api@^3.0.0:
     lodash.memoize "^4.1.2"
     lodash.uniq "^4.5.0"
 
-caniuse-lite@^1.0.0, caniuse-lite@^1.0.30000981, caniuse-lite@^1.0.30001109, caniuse-lite@^1.0.30001125, caniuse-lite@^1.0.30001219:
+caniuse-lite@^1.0.0, caniuse-lite@^1.0.30000981, caniuse-lite@^1.0.30001109, caniuse-lite@^1.0.30001219:
   version "1.0.30001237"
   resolved "https://registry.yarnpkg.com/caniuse-lite/-/caniuse-lite-1.0.30001237.tgz#4b7783661515b8e7151fc6376cfd97f0e427b9e5"
   integrity sha512-pDHgRndit6p1NR2GhzMbQ6CkRrp4VKuSsqbcLeOQppYPKOYkKT/6ZvZDvKJUqcmtyWIAHuZq3SVS2vc1egCZzw==
@@ -4280,17 +4270,7 @@ css-tree@^1.1.2:
     mdn-data "2.0.14"
     source-map "^0.6.1"
 
-css-what@^3.2.1:
-  version "3.4.2"
-  resolved "https://registry.yarnpkg.com/css-what/-/css-what-3.4.2.tgz#ea7026fcb01777edbde52124e21f327e7ae950e4"
-  integrity sha512-ACUm3L0/jiZTqfzRM3Hi9Q8eZqd6IK37mMWPLz9PJxkLWllYeRf+EHUSHYEtFop2Eqytaq1FizFVh7XfBnXCDQ==
-
-css-what@^4.0.0:
-  version "4.0.0"
-  resolved "https://registry.yarnpkg.com/css-what/-/css-what-4.0.0.tgz#35e73761cab2eeb3d3661126b23d7aa0e8432233"
-  integrity sha512-teijzG7kwYfNVsUh2H/YN62xW3KK9YhXEgSlbxMlcyjPNvdKJqFx5lrwlJgoFP1ZHlB89iGDlo/JyshKeRhv5A==
-
-css-what@^5.0.0:
+css-what@>=5.0.1, css-what@^3.2.1, css-what@^4.0.0, css-what@^5.0.0:
   version "5.0.1"
   resolved "https://registry.yarnpkg.com/css-what/-/css-what-5.0.1.tgz#3efa820131f4669a8ac2408f9c32e7c7de9f4cad"
   integrity sha512-FYDTSHb/7KXsWICVsxdmiExPjCfRC4qRFBdVwv7Ax9hMnvMmEjP9RfxTEZ3qPZGmADDn2vAKSo9UcN1jKVYscg==
@@ -5193,7 +5173,7 @@ ejs@^2.6.1:
   resolved "https://registry.yarnpkg.com/ejs/-/ejs-2.7.4.tgz#48661287573dcc53e366c7a1ae52c3a120eec9ba"
   integrity sha512-7vmuyh5+kuUyJKePhQfRQBhXV5Ce+RnaeeQArKu1EAMpL3WbgMt5WG6uQZpEVvYSSsxMXRKOewtDk9RaTKXRlA==
 
-electron-to-chromium@^1.3.564, electron-to-chromium@^1.3.723:
+electron-to-chromium@^1.3.723:
   version "1.3.752"
   resolved "https://registry.yarnpkg.com/electron-to-chromium/-/electron-to-chromium-1.3.752.tgz#0728587f1b9b970ec9ffad932496429aef750d09"
   integrity sha512-2Tg+7jSl3oPxgsBsWKh5H83QazTkmWG/cnNwJplmyZc7KcN61+I10oUgaXSVk/NwfvN3BdkKDR4FYuRBQQ2v0A==
@@ -5322,7 +5302,7 @@ es-to-primitive@^1.2.1:
     is-date-object "^1.0.1"
     is-symbol "^1.0.2"
"^1.0.2" -escalade@^3.0.2, escalade@^3.1.1: +escalade@^3.1.1: version "3.1.1" resolved "https://registry.yarnpkg.com/escalade/-/escalade-3.1.1.tgz#d8cfdc7000965c5a0174b4a82eaa5c0552742e40" integrity sha512-k0er2gUkLf8O0zKJiAhmkTnJlTvINGv7ygDNPbeIsX/TJjGJZHuh9B2UxbsaEkmlEo9MfhrSzmhIlhRlI2GXnw== @@ -6188,18 +6168,10 @@ getpass@^0.1.1: dependencies: assert-plus "^1.0.0" -glob-parent@^3.1.0: - version "3.1.0" - resolved "https://registry.yarnpkg.com/glob-parent/-/glob-parent-3.1.0.tgz#9e6af6299d8d3bd2bd40430832bd113df906c5ae" - integrity sha1-nmr2KZ2NO9K9QEMIMr0RPfkGxa4= - dependencies: - is-glob "^3.1.0" - path-dirname "^1.0.0" - -glob-parent@^5.1.0, glob-parent@^5.1.2: - version "5.1.2" - resolved "https://registry.yarnpkg.com/glob-parent/-/glob-parent-5.1.2.tgz#869832c58034fe68a4093c17dc15e8340d8401c4" - integrity sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow== +glob-parent@>=6.0.0, glob-parent@^3.1.0, glob-parent@^5.1.0, glob-parent@^5.1.2: + version "6.0.0" + resolved "https://registry.yarnpkg.com/glob-parent/-/glob-parent-6.0.0.tgz#f851b59b388e788f3a44d63fab50382b2859c33c" + integrity sha512-Hdd4287VEJcZXUwv1l8a+vXC1GjOQqXe+VS30w/ypihpcnu9M1n3xeYeJu5CBpeEQj2nAab2xxz28GuA3vp4Ww== dependencies: is-glob "^4.0.1" @@ -7056,7 +7028,7 @@ is-extendable@^1.0.1: dependencies: is-plain-object "^2.0.4" -is-extglob@^2.1.0, is-extglob@^2.1.1: +is-extglob@^2.1.1: version "2.1.1" resolved "https://registry.yarnpkg.com/is-extglob/-/is-extglob-2.1.1.tgz#a88c02535791f02ed37c76a1b9ea9773c833f8c2" integrity sha1-qIwCU1eR8C7TfHahueqXc8gz+MI= @@ -7088,13 +7060,6 @@ is-generator-fn@^2.0.0: resolved "https://registry.yarnpkg.com/is-generator-fn/-/is-generator-fn-2.1.0.tgz#7d140adc389aaf3011a8f2a2a4cfa6faadffb118" integrity sha512-cTIB4yPYL/Grw0EaSzASzg6bBy9gqCofvWN8okThAYIxKJZC+udlRAmGbM0XLeniEJSs8uEgHPGuHSe1XsOLSQ== -is-glob@^3.1.0: - version "3.1.0" - resolved "https://registry.yarnpkg.com/is-glob/-/is-glob-3.1.0.tgz#7ba5ae24217804ac70707b96922567486cc3e84a" - integrity sha1-e6WuJCF4BKxwcHuWkiVnSGzD6Eo= - dependencies: - is-extglob "^2.1.0" - is-glob@^4.0.0, is-glob@^4.0.1: version "4.0.1" resolved "https://registry.yarnpkg.com/is-glob/-/is-glob-4.0.1.tgz#7567dbe9f2f5e2467bc77ab83c4a29482407a5dc" @@ -9034,7 +8999,7 @@ node-modules-regexp@^1.0.0: resolved "https://registry.yarnpkg.com/node-modules-regexp/-/node-modules-regexp-1.0.0.tgz#8d9dbe28964a4ac5712e9131642107c71e90ec40" integrity sha1-jZ2+KJZKSsVxLpExZCEHxx6Q7EA= -node-releases@^1.1.61, node-releases@^1.1.71: +node-releases@^1.1.71: version "1.1.73" resolved "https://registry.yarnpkg.com/node-releases/-/node-releases-1.1.73.tgz#dd4e81ddd5277ff846b80b52bb40c49edf7a7b20" integrity sha512-uW7fodD6pyW2FZNZnp/Z3hvWKeEW1Y8R1+1CnErE8cXFXzl5blBOoVB41CvMer6P6Q0S5FXDwcHgFd1Wj0U9zg== @@ -9784,11 +9749,6 @@ pascalcase@^0.1.1: resolved "https://registry.yarnpkg.com/pascalcase/-/pascalcase-0.1.1.tgz#b363e55e8006ca6fe21784d2db22bd15d7917f14" integrity sha1-s2PlXoAGym/iF4TS2yK9FdeRfxQ= -path-dirname@^1.0.0: - version "1.0.2" - resolved "https://registry.yarnpkg.com/path-dirname/-/path-dirname-1.0.2.tgz#cc33d24d525e099a5388c0336c6e32b9160609e0" - integrity sha1-zDPSTVJeCZpTiMAzbG4yuRYGCeA= - path-exists@^2.0.0: version "2.1.0" resolved "https://registry.yarnpkg.com/path-exists/-/path-exists-2.1.0.tgz#0feb6c64f0fc518d9a754dd5efb62c7022761f4b" @@ -12718,15 +12678,10 @@ treeverse@^1.0.4: resolved "https://registry.yarnpkg.com/treeverse/-/treeverse-1.0.4.tgz#a6b0ebf98a1bca6846ddc7ecbc900df08cb9cd5f" integrity 
 
-trim-newlines@^1.0.0:
-  version "1.0.0"
-  resolved "https://registry.yarnpkg.com/trim-newlines/-/trim-newlines-1.0.0.tgz#5887966bb582a4503a41eb524f7d35011815a613"
-  integrity sha1-WIeWa7WCpFA6QetST301ARgVphM=
-
-trim-newlines@^3.0.0:
-  version "3.0.1"
-  resolved "https://registry.yarnpkg.com/trim-newlines/-/trim-newlines-3.0.1.tgz#260a5d962d8b752425b32f3a7db0dcacd176c144"
-  integrity sha512-c1PTsA3tYrIsLGkJkzHF+w9F2EyxfXGo4UyJc4pFL++FMjnq0HJS69T3M7d//gKrFKwy429bouPescbjecU+Zw==
+trim-newlines@>=4.0.2, trim-newlines@^1.0.0, trim-newlines@^3.0.0:
+  version "4.0.2"
+  resolved "https://registry.yarnpkg.com/trim-newlines/-/trim-newlines-4.0.2.tgz#d6aaaf6a0df1b4b536d183879a6b939489808c7c"
+  integrity sha512-GJtWyq9InR/2HRiLZgpIKv+ufIKrVrvjQWEj7PxAXNc5dwbNJkqhAUoAGgzRmULAnoOM5EIpveYd3J2VeSAIew==
 
 trough@^1.0.0:
   version "1.0.5"