From a9d7a25cdf5f6d2f3876209e194106d017dac240 Mon Sep 17 00:00:00 2001 From: Alexey Volkov Date: Wed, 2 Oct 2019 16:14:16 -0700 Subject: [PATCH 1/9] SDK - Client - Added a way to set experiment name using environment variables This is useful for launching notebooks or pipeline files that submit themselves for execution. --- sdk/python/kfp/_client.py | 9 ++++++++- test/sample-test/check_notebook_results.py | 5 +++-- test/sample-test/sample_test_launcher.py | 3 --- 3 files changed, 11 insertions(+), 6 deletions(-) diff --git a/sdk/python/kfp/_client.py b/sdk/python/kfp/_client.py index 2bf095d2217..e1eda00e1dd 100644 --- a/sdk/python/kfp/_client.py +++ b/sdk/python/kfp/_client.py @@ -64,6 +64,8 @@ def camel_case_to_snake_case(name): KF_PIPELINES_ENDPOINT_ENV = 'KF_PIPELINES_ENDPOINT' KF_PIPELINES_UI_ENDPOINT_ENV = 'KF_PIPELINES_UI_ENDPOINT' +KF_PIPELINES_DEFAULT_EXPERIMENT_NAME = 'KF_PIPELINES_DEFAULT_EXPERIMENT_NAME' +KF_PIPELINES_OVERRIDE_EXPERIMENT_NAME = 'KF_PIPELINES_OVERRIDE_EXPERIMENT_NAME' class Client(object): """ API Client for KubeFlow Pipeline. @@ -365,7 +367,12 @@ def __str__(self): #TODO: Check arguments against the pipeline function pipeline_name = os.path.basename(pipeline_file) - experiment_name = experiment_name or 'Default' + experiment_name = experiment_name or os.environ.get(KF_PIPELINES_DEFAULT_EXPERIMENT_NAME, None) + overridden_experiment_name = os.environ.get(KF_PIPELINES_OVERRIDE_EXPERIMENT_NAME, experiment_name) + if overridden_experiment_name != experiment_name: + import warnings + warnings.warn('Changing experiment name from "{}" to "{}".'.format(experiment_name, overridden_experiment_name)) + experiment_name = overridden_experiment_name or 'Default' run_name = run_name or pipeline_name + ' ' + datetime.now().strftime('%Y-%m-%d %H-%M-%S') experiment = self.create_experiment(name=experiment_name) run_info = self.run_pipeline(experiment.id, run_name, pipeline_file, arguments) diff --git a/test/sample-test/check_notebook_results.py b/test/sample-test/check_notebook_results.py index 90420971050..8faa407eb8d 100644 --- a/test/sample-test/check_notebook_results.py +++ b/test/sample-test/check_notebook_results.py @@ -34,11 +34,12 @@ def __init__(self, testname, result, run_pipeline, namespace='kubeflow'): self._exit_code = None self._run_pipeline = run_pipeline self._namespace = namespace + self._experiment_name = self._testname + '-test' def run(self): """ Run the notebook sample as a python script. 
""" self._exit_code = str( - subprocess.call(['ipython', '%s.py' % self._testname])) + subprocess.call(['ipython', '%s.py' % self._testname], env={'KF_PIPELINES_OVERRIDE_EXPERIMENT_NAME': self._experiment_name})) def check(self): @@ -63,7 +64,7 @@ def check(self): test_timeout = raw_args['test_timeout'] if self._run_pipeline: - experiment = self._testname + '-test' + experiment = self._experiment_name ###### Initialization ###### host = 'ml-pipeline.%s.svc.cluster.local:8888' % self._namespace client = Client(host=host) diff --git a/test/sample-test/sample_test_launcher.py b/test/sample-test/sample_test_launcher.py index c8b5bb8d0b5..5c47f186a3c 100644 --- a/test/sample-test/sample_test_launcher.py +++ b/test/sample-test/sample_test_launcher.py @@ -128,9 +128,6 @@ def _compile(self): if 'run_pipeline' in raw_args.keys(): self._run_pipeline = raw_args['run_pipeline'] - if self._run_pipeline: - nb_params['experiment_name'] = self._test_name + '-test' - pm.execute_notebook( input_path='%s.ipynb' % self._test_name, output_path='%s.ipynb' % self._test_name, From 5c7bd26b45babcc37568774ab0cc8129797f1157 Mon Sep 17 00:00:00 2001 From: Alexey Volkov Date: Wed, 2 Oct 2019 17:05:03 -0700 Subject: [PATCH 2/9] Switched to subprocess.run which supports env --- test/sample-test/check_notebook_results.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/test/sample-test/check_notebook_results.py b/test/sample-test/check_notebook_results.py index 8faa407eb8d..0dbfd835abf 100644 --- a/test/sample-test/check_notebook_results.py +++ b/test/sample-test/check_notebook_results.py @@ -38,8 +38,7 @@ def __init__(self, testname, result, run_pipeline, namespace='kubeflow'): def run(self): """ Run the notebook sample as a python script. """ - self._exit_code = str( - subprocess.call(['ipython', '%s.py' % self._testname], env={'KF_PIPELINES_OVERRIDE_EXPERIMENT_NAME': self._experiment_name})) + self._exit_code = subprocess.run(['ipython', '%s.py' % self._testname], env={'KF_PIPELINES_OVERRIDE_EXPERIMENT_NAME': self._experiment_name}).returncode def check(self): From fd5967b99d451f5c6db86c57a6c336c987343c4a Mon Sep 17 00:00:00 2001 From: Alexey Volkov Date: Wed, 2 Oct 2019 18:06:44 -0700 Subject: [PATCH 3/9] Setting the environment variable differently Looks like `subprocess.run` uses `PATH` to search for the program. --- test/sample-test/check_notebook_results.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/test/sample-test/check_notebook_results.py b/test/sample-test/check_notebook_results.py index 0dbfd835abf..867b9027c46 100644 --- a/test/sample-test/check_notebook_results.py +++ b/test/sample-test/check_notebook_results.py @@ -38,7 +38,10 @@ def __init__(self, testname, result, run_pipeline, namespace='kubeflow'): def run(self): """ Run the notebook sample as a python script. 
""" - self._exit_code = subprocess.run(['ipython', '%s.py' % self._testname], env={'KF_PIPELINES_OVERRIDE_EXPERIMENT_NAME': self._experiment_name}).returncode + import os + env = os.environ.copy() + env['KF_PIPELINES_OVERRIDE_EXPERIMENT_NAME'] = self._experiment_name + self._exit_code = subprocess.run(['ipython', '%s.py' % self._testname], env=env).returncode def check(self): From 2fa3b4ce4d9c5afc48c5c8985b9b6d4fca93041d Mon Sep 17 00:00:00 2001 From: Alexey Volkov Date: Wed, 2 Oct 2019 19:54:38 -0700 Subject: [PATCH 4/9] Convert return code to string --- test/sample-test/check_notebook_results.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/test/sample-test/check_notebook_results.py b/test/sample-test/check_notebook_results.py index 867b9027c46..25f2a0d55bd 100644 --- a/test/sample-test/check_notebook_results.py +++ b/test/sample-test/check_notebook_results.py @@ -41,7 +41,7 @@ def run(self): import os env = os.environ.copy() env['KF_PIPELINES_OVERRIDE_EXPERIMENT_NAME'] = self._experiment_name - self._exit_code = subprocess.run(['ipython', '%s.py' % self._testname], env=env).returncode + self._exit_code = str(subprocess.run(['ipython', '%s.py' % self._testname], env=env).returncode) def check(self): From 1814f60ef7bdfda25528a8e0ff5b7046dda3a076 Mon Sep 17 00:00:00 2001 From: Alexey Volkov Date: Thu, 10 Oct 2019 16:07:17 -0700 Subject: [PATCH 5/9] Changed the way the experiment name is being set --- test/sample-test/check_notebook_results.py | 12 +++++------- test/sample-test/run_sample_test.py | 7 ++++--- test/sample-test/sample_test_launcher.py | 12 ++++++++++-- 3 files changed, 19 insertions(+), 12 deletions(-) diff --git a/test/sample-test/check_notebook_results.py b/test/sample-test/check_notebook_results.py index 25f2a0d55bd..2d3dc3db3e0 100644 --- a/test/sample-test/check_notebook_results.py +++ b/test/sample-test/check_notebook_results.py @@ -21,28 +21,26 @@ class NoteBookChecker(object): - def __init__(self, testname, result, run_pipeline, namespace='kubeflow'): + def __init__(self, testname, result, run_pipeline, experiment_name, namespace='kubeflow'): """ Util class for checking notebook sample test running results. :param testname: test name in the json xml. :param result: name of the file that stores the test result :param run_pipeline: whether to submit for a pipeline run. :param namespace: where the pipeline system is deployed. + :param experiment_name: Name of the experiment to monitor """ self._testname = testname self._result = result self._exit_code = None self._run_pipeline = run_pipeline self._namespace = namespace - self._experiment_name = self._testname + '-test' + self._experiment_name = experiment_name def run(self): """ Run the notebook sample as a python script. """ - import os - env = os.environ.copy() - env['KF_PIPELINES_OVERRIDE_EXPERIMENT_NAME'] = self._experiment_name - self._exit_code = str(subprocess.run(['ipython', '%s.py' % self._testname], env=env).returncode) - + self._exit_code = str( + subprocess.call(['ipython', '%s.py' % self._testname])) def check(self): """ Check the pipeline running results of the notebook sample. 
""" diff --git a/test/sample-test/run_sample_test.py b/test/sample-test/run_sample_test.py index 5eed429add6..28e3f0cb8e6 100644 --- a/test/sample-test/run_sample_test.py +++ b/test/sample-test/run_sample_test.py @@ -25,7 +25,7 @@ class PySampleChecker(object): - def __init__(self, testname, input, output, result, namespace='kubeflow'): + def __init__(self, testname, input, output, result, experiment_name, namespace='kubeflow'): """Util class for checking python sample test running results. :param testname: test name. @@ -33,8 +33,10 @@ def __init__(self, testname, input, output, result, namespace='kubeflow'): :param output: The path of the test output. :param result: The path of the test result that will be exported. :param namespace: namespace of the deployed pipeline system. Default: kubeflow + :param experiment_name: Name of the experiment to monitor """ self._testname = testname + self._experiment_name = experiment_name self._input = input self._output = output self._result = result @@ -68,8 +70,7 @@ def run(self): exit(1) ###### Create Experiment ###### - experiment_name = self._testname + ' sample experiment' - response = self._client.create_experiment(experiment_name) + response = self._client.create_experiment(self._experiment_name) self._experiment_id = response.id utils.add_junit_test(self._test_cases, 'create experiment', True) diff --git a/test/sample-test/sample_test_launcher.py b/test/sample-test/sample_test_launcher.py index 5c47f186a3c..06f20eea340 100644 --- a/test/sample-test/sample_test_launcher.py +++ b/test/sample-test/sample_test_launcher.py @@ -155,10 +155,16 @@ def run_test(self): self._compile() self._injection() + # Overriding the experiment name of pipeline runs + experiment_name = self._test_name + '-test' + os.environ['KF_PIPELINES_OVERRIDE_EXPERIMENT_NAME'] = experiment_name + if self._is_notebook: nbchecker = NoteBookChecker(testname=self._test_name, result=self._sample_test_result, - run_pipeline=self._run_pipeline) + run_pipeline=self._run_pipeline, + experiment_name=experiment_name, + ) nbchecker.run() os.chdir(TEST_DIR) nbchecker.check() @@ -173,7 +179,9 @@ def run_test(self): input=input_file, output=self._sample_test_output, result=self._sample_test_result, - namespace=self._namespace) + namespace=self._namespace, + experiment_name=experiment_name, + ) pysample_checker.run() pysample_checker.check() From c1a99c16991f36a091475789304033b64b10bef0 Mon Sep 17 00:00:00 2001 From: Alexey Volkov Date: Thu, 10 Oct 2019 18:22:37 -0700 Subject: [PATCH 6/9] Changed how the notebook installs the SDK Notebook is overriding the SDK that's being tested. 
--- samples/core/lightweight_component/lightweight_component.ipynb | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/samples/core/lightweight_component/lightweight_component.ipynb b/samples/core/lightweight_component/lightweight_component.ipynb index b0465f152f9..f3c0a233356 100644 --- a/samples/core/lightweight_component/lightweight_component.ipynb +++ b/samples/core/lightweight_component/lightweight_component.ipynb @@ -39,7 +39,7 @@ "outputs": [], "source": [ "# Install the SDK\n", - "!pip3 install kfp --upgrade" + "!pip3 install 'kfp>=0.1.31.2' --quiet" ] }, { From 7ccf6e98cd4aca9f0fd4fa2dc4068066aa1a6caa Mon Sep 17 00:00:00 2001 From: Alexey Volkov Date: Thu, 10 Oct 2019 20:50:23 -0700 Subject: [PATCH 7/9] Not installing the KFP SDK package --- samples/core/lightweight_component/lightweight_component.ipynb | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/samples/core/lightweight_component/lightweight_component.ipynb b/samples/core/lightweight_component/lightweight_component.ipynb index f3c0a233356..5bb0ce7298e 100644 --- a/samples/core/lightweight_component/lightweight_component.ipynb +++ b/samples/core/lightweight_component/lightweight_component.ipynb @@ -39,7 +39,7 @@ "outputs": [], "source": [ "# Install the SDK\n", - "!pip3 install 'kfp>=0.1.31.2' --quiet" + "#!pip3 install 'kfp>=0.1.31.2' --quiet" ] }, { From a41aa7a0de607a0da39e5c2a8440b8261c39cacc Mon Sep 17 00:00:00 2001 From: Alexey Volkov Date: Fri, 11 Oct 2019 18:33:48 -0700 Subject: [PATCH 8/9] Removed the experiment_name from samples and configs. --- samples/README.md | 6 +++--- .../arena-samples/standalonejob/standalone_pipeline.ipynb | 3 --- .../contrib/ibm-samples/ffdl-seldon/ffdl_pipeline.ipynb | 7 ++----- .../image-captioning-gcp/Image Captioning TF 2.0.ipynb | 3 +-- samples/core/ai_platform/ai_platform.ipynb | 5 ++--- samples/core/component_build/component_build.ipynb | 3 +-- samples/core/dataflow/dataflow.ipynb | 5 ++--- samples/core/kubeflow_tf_serving/kubeflow_tf_serving.ipynb | 3 +-- .../core/lightweight_component/lightweight_component.ipynb | 3 +-- samples/core/multiple_outputs/multiple_outputs.ipynb | 5 ++--- test/sample-test/configs/ai_platform.config.yaml | 1 - test/sample-test/configs/component_build.config.yaml | 1 - test/sample-test/configs/dataflow.config.yaml | 1 - test/sample-test/configs/kubeflow_tf_serving.config.yaml | 1 - test/sample-test/configs/multiple_outputs.config.yaml | 1 - 15 files changed, 15 insertions(+), 33 deletions(-) diff --git a/samples/README.md b/samples/README.md index e4d453d9003..1558a1469a4 100644 --- a/samples/README.md +++ b/samples/README.md @@ -75,11 +75,11 @@ For better readability and integrations with the sample test infrastructure, sam * The sample file should be either `*.py` or `*.ipynb`, and its file name is consistent with its directory name. * For `*.py` sample, it's recommended to have a main invoking `kfp.compiler.Compiler().compile()` to compile the pipeline function into pipeline yaml spec. -* For `*.ipynb` sample, parameters (e.g., `experiment_name` and `project_name`) +* For `*.ipynb` sample, parameters (e.g., `project_name`) should be defined in a dedicated cell and tagged as parameter. (If the author would like the sample test infra to run it by setting the `run_pipeline` flag to True in -the associated `config.yaml` file, the sample test infra will expect a parameter `experiment_name` -to inject so that it can run in the sample test experiment.) 
+the associated `config.yaml` file, the sample test infra will expect the sample to use the +`kfp.Client().create_run_from_pipeline_func` method for starting the run so that the sample test can watch the run.) Detailed guideline is [here](https://github.com/nteract/papermill). Also, all the environment setup and preparation should be within the notebook, such as by `!pip install packages` diff --git a/samples/contrib/arena-samples/standalonejob/standalone_pipeline.ipynb b/samples/contrib/arena-samples/standalonejob/standalone_pipeline.ipynb index 1db4df1270b..353001f2ecd 100644 --- a/samples/contrib/arena-samples/standalonejob/standalone_pipeline.ipynb +++ b/samples/contrib/arena-samples/standalonejob/standalone_pipeline.ipynb @@ -48,7 +48,6 @@ "\n", "**Please fill in the below environment variables with you own settings.**\n", "\n", - "- **EXPERIMENT_NAME**: A unique experiment name that will be created for this notebook demo.\n", "- **KFP_PACKAGE**: The latest release of kubeflow pipeline platform library.\n", "- **KUBEFLOW_PIPELINE_LINK**: The link to access the KubeFlow pipeline API.\n", "- **MOUNT**: The mount configuration to map data above into the training job. The format is 'data:/directory'\n", @@ -61,8 +60,6 @@ "metadata": {}, "outputs": [], "source": [ - "EXPERIMENT_NAME = 'myjob'\n", - "RUN_ID=\"run\"\n", "KFP_SERVICE=\"ml-pipeline.kubeflow.svc.cluster.local:8888\"\n", "KFP_PACKAGE = 'http://kubeflow.oss-cn-beijing.aliyuncs.com/kfp/0.1.14/kfp.tar.gz'\n", "KFP_ARENA_PACKAGE = 'http://kubeflow.oss-cn-beijing.aliyuncs.com/kfp-arena/kfp-arena-0.3.tar.gz'\n", diff --git a/samples/contrib/ibm-samples/ffdl-seldon/ffdl_pipeline.ipynb b/samples/contrib/ibm-samples/ffdl-seldon/ffdl_pipeline.ipynb index 3dcf85e772f..44f41334dd3 100644 --- a/samples/contrib/ibm-samples/ffdl-seldon/ffdl_pipeline.ipynb +++ b/samples/contrib/ibm-samples/ffdl-seldon/ffdl_pipeline.ipynb @@ -90,10 +90,7 @@ "# KUBEFLOW_PIPELINE_LINK = ''\n", "# client = kfp.Client(KUBEFLOW_PIPELINE_LINK)\n", "\n", - "client = kfp.Client()\n", - "\n", - "\n", - "EXPERIMENT_NAME = 'FfDL-Seldon Experiments'" + "client = kfp.Client()\n" ] }, { @@ -179,7 +176,7 @@ " 'model-class-file': 'gender_classification.py'}\n", "\n", "\n", - "run = client.create_run_from_pipeline_func(ffdlPipeline, arguments=parameters, experiment_name=EXPERIMENT_NAME).run_info\n", + "run = client.create_run_from_pipeline_func(ffdlPipeline, arguments=parameters).run_info\n", "\n", "import IPython\n", "html = ('

'\n", diff --git a/samples/contrib/image-captioning-gcp/Image Captioning TF 2.0.ipynb b/samples/contrib/image-captioning-gcp/Image Captioning TF 2.0.ipynb index 0d72d06b1bd..5a8a27259a5 100644 --- a/samples/contrib/image-captioning-gcp/Image Captioning TF 2.0.ipynb +++ b/samples/contrib/image-captioning-gcp/Image Captioning TF 2.0.ipynb @@ -135,7 +135,6 @@ "outputs": [], "source": [ "# Kubeflow project settings\n", - "EXPERIMENT_NAME = 'Image Captioning'\n", "PROJECT_NAME = '[YOUR-PROJECT-NAME]' \n", "PIPELINE_STORAGE_PATH = GCS_BUCKET + '/ms-coco/components' # path to save pipeline component images\n", "BASE_IMAGE = 'gcr.io/%s/img-cap:latest' % PROJECT_NAME # using image created in README instructions\n", @@ -913,7 +912,7 @@ " 'training_batch_size': 16, # has to be smaller since only training on 80/100 examples \n", "}\n", "\n", - "kfp.Client().create_run_from_pipeline_func(pipeline, arguments=arguments, experiment_name=EXPERIMENT_NAME)" + "kfp.Client().create_run_from_pipeline_func(pipeline, arguments=arguments)" ] }, { diff --git a/samples/core/ai_platform/ai_platform.ipynb b/samples/core/ai_platform/ai_platform.ipynb index 4bcf6924fe7..5ee5047a652 100644 --- a/samples/core/ai_platform/ai_platform.ipynb +++ b/samples/core/ai_platform/ai_platform.ipynb @@ -79,8 +79,7 @@ "source": [ "# Required Parameters\n", "project_id = ''\n", - "output = 'gs://' # No ending slash\n", - "experiment_name = 'Chicago Crime Prediction'" + "output = 'gs://' # No ending slash\n" ] }, { @@ -280,7 +279,7 @@ "metadata": {}, "outputs": [], "source": [ - "pipeline = kfp.Client().create_run_from_pipeline_func(pipeline, arguments={}, experiment_name=experiment_name)" + "pipeline = kfp.Client().create_run_from_pipeline_func(pipeline, arguments={})" ] }, { diff --git a/samples/core/component_build/component_build.ipynb b/samples/core/component_build/component_build.ipynb index 297945fce6d..b39dcce05cf 100644 --- a/samples/core/component_build/component_build.ipynb +++ b/samples/core/component_build/component_build.ipynb @@ -65,7 +65,6 @@ }, "outputs": [], "source": [ - "experiment_name = 'container_building'" ] }, { @@ -202,7 +201,7 @@ "outputs": [], "source": [ "arguments = {'a': '7', 'b': '8'}\n", - "kfp.Client().create_run_from_pipeline_func(pipeline_func=calc_pipeline, arguments=arguments, experiment_name=experiment_name)\n", + "kfp.Client().create_run_from_pipeline_func(pipeline_func=calc_pipeline, arguments=arguments)\n", "\n", "# This should output link that leads to the run information page. 
\n", "# Note: There is a bug in JupyterLab that modifies the URL and makes the link stop working" diff --git a/samples/core/dataflow/dataflow.ipynb b/samples/core/dataflow/dataflow.ipynb index 241ccbf0dff..bdf8d6373f5 100644 --- a/samples/core/dataflow/dataflow.ipynb +++ b/samples/core/dataflow/dataflow.ipynb @@ -74,8 +74,7 @@ "outputs": [], "source": [ "project = 'Input your PROJECT ID'\n", - "output = 'Input your GCS bucket name' # No ending slash\n", - "experiment_name = 'Dataflow - Launch Python'" + "output = 'Input your GCS bucket name' # No ending slash\n" ] }, { @@ -368,7 +367,7 @@ } ], "source": [ - "kfp.Client().create_run_from_pipeline_func(pipeline, arguments={}, experiment_name=experiment_name)" + "kfp.Client().create_run_from_pipeline_func(pipeline, arguments={})" ] }, { diff --git a/samples/core/kubeflow_tf_serving/kubeflow_tf_serving.ipynb b/samples/core/kubeflow_tf_serving/kubeflow_tf_serving.ipynb index d830a6507b8..64c551207bf 100644 --- a/samples/core/kubeflow_tf_serving/kubeflow_tf_serving.ipynb +++ b/samples/core/kubeflow_tf_serving/kubeflow_tf_serving.ipynb @@ -172,7 +172,6 @@ "# Set your output and project. !!!Must Do before you can proceed!!!\n", "project = 'Your-Gcp-Project-ID' #'Your-GCP-Project-ID'\n", "model_name = 'model-name' # Model name matching TF_serve naming requirements \n", - "experiment_name = 'serving_component'\n", "import time\n", "ts = int(time.time())\n", "model_version = str(ts) # Here we use timestamp as version to avoid conflict \n", @@ -323,7 +322,7 @@ } ], "source": [ - "kfp.Client().create_run_from_pipeline_func(model_server, arguments={}, experiment_name=experiment_name)\n", + "kfp.Client().create_run_from_pipeline_func(model_server, arguments={})\n", "\n", "#vvvvvvvvv This link leads to the run information page. (Note: There is a bug in JupyterLab that modifies the URL and makes the link stop working)" ] diff --git a/samples/core/lightweight_component/lightweight_component.ipynb b/samples/core/lightweight_component/lightweight_component.ipynb index 5bb0ce7298e..8292839bec5 100644 --- a/samples/core/lightweight_component/lightweight_component.ipynb +++ b/samples/core/lightweight_component/lightweight_component.ipynb @@ -29,7 +29,6 @@ }, "outputs": [], "source": [ - "experiment_name = 'lightweight python components'" ] }, { @@ -243,7 +242,7 @@ "arguments = {'a': '7', 'b': '8'}\n", "\n", "#Submit a pipeline run\n", - "kfp.Client().create_run_from_pipeline_func(calc_pipeline, arguments=arguments, experiment_name=experiment_name)\n", + "kfp.Client().create_run_from_pipeline_func(calc_pipeline, arguments=arguments)\n", "\n", "#vvvvvvvvv This link leads to the run information page. 
(Note: There is a bug in JupyterLab that modifies the URL and makes the link stop working)" ] diff --git a/samples/core/multiple_outputs/multiple_outputs.ipynb b/samples/core/multiple_outputs/multiple_outputs.ipynb index 61a7dd0b522..469d0275e9a 100644 --- a/samples/core/multiple_outputs/multiple_outputs.ipynb +++ b/samples/core/multiple_outputs/multiple_outputs.ipynb @@ -51,8 +51,7 @@ "outputs": [], "source": [ "output = 'gs://[BUCKET-NAME]' # GCS bucket name\n", - "project_id = '[PROJECT-NAME]' # GCP project name\n", - "experiment_name = 'Multiple Outputs Sample'" + "project_id = '[PROJECT-NAME]' # GCP project name\n" ] }, { @@ -161,7 +160,7 @@ " 'b': 2.5,\n", " 'c': 3.0,\n", "}\n", - "run_result = kfp.Client().create_run_from_pipeline_func(pipeline, arguments=arguments, experiment_name=experiment_name)" + "run_result = kfp.Client().create_run_from_pipeline_func(pipeline, arguments=arguments)" ] } ], diff --git a/test/sample-test/configs/ai_platform.config.yaml b/test/sample-test/configs/ai_platform.config.yaml index 13be31e190a..6f1ee86b466 100644 --- a/test/sample-test/configs/ai_platform.config.yaml +++ b/test/sample-test/configs/ai_platform.config.yaml @@ -16,4 +16,3 @@ test_name: ai_platform notebook_params: output: project_id: ml-pipeline-test - experiment_name: ai_platform-test diff --git a/test/sample-test/configs/component_build.config.yaml b/test/sample-test/configs/component_build.config.yaml index b74b08ac6ab..39f95d6582b 100644 --- a/test/sample-test/configs/component_build.config.yaml +++ b/test/sample-test/configs/component_build.config.yaml @@ -14,5 +14,4 @@ test_name: component_build notebook_params: - experiment_name: component_build-test PROJECT_NAME: ml-pipeline-test diff --git a/test/sample-test/configs/dataflow.config.yaml b/test/sample-test/configs/dataflow.config.yaml index 28a853bf9fd..ddcca910761 100644 --- a/test/sample-test/configs/dataflow.config.yaml +++ b/test/sample-test/configs/dataflow.config.yaml @@ -16,5 +16,4 @@ test_name: dataflow notebook_params: output: project: ml-pipeline-test - experiment_name: dataflow-test run_pipeline: False \ No newline at end of file diff --git a/test/sample-test/configs/kubeflow_tf_serving.config.yaml b/test/sample-test/configs/kubeflow_tf_serving.config.yaml index c2fc5d3859e..0692aa9f0a0 100644 --- a/test/sample-test/configs/kubeflow_tf_serving.config.yaml +++ b/test/sample-test/configs/kubeflow_tf_serving.config.yaml @@ -16,4 +16,3 @@ test_name: kubeflow_tf_serving notebook_params: output: project: ml-pipeline-test - experiment_name: kubeflow_tf_serving-test \ No newline at end of file diff --git a/test/sample-test/configs/multiple_outputs.config.yaml b/test/sample-test/configs/multiple_outputs.config.yaml index 00f400f68ec..3b55b6b470a 100644 --- a/test/sample-test/configs/multiple_outputs.config.yaml +++ b/test/sample-test/configs/multiple_outputs.config.yaml @@ -16,4 +16,3 @@ test_name: multiple_outputs notebook_params: output: project_id: ml-pipeline-test - experiment_name: multiple_outputs-test \ No newline at end of file From e38119288e2fb715ec2a844c94883341d80da48a Mon Sep 17 00:00:00 2001 From: Alexey Volkov Date: Fri, 11 Oct 2019 18:38:07 -0700 Subject: [PATCH 9/9] Changed the SDK installation lines in samples Otherwise the sample tests do not correctly test the new SDK code. 
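One way to confirm that a sample really runs against the SDK under test rather
than a freshly downloaded release is to log which kfp package actually gets
imported. The snippet below is only illustrative and is not added by this patch:

    import kfp

    # A site-packages path pointing at the released wheel (instead of the source
    # tree baked into the test image) would mean the sample replaced the SDK again.
    print('kfp version:', kfp.__version__)
    print('kfp location:', kfp.__file__)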
--- samples/core/ai_platform/ai_platform.ipynb | 2 +- samples/core/component_build/component_build.ipynb | 2 +- samples/core/dataflow/dataflow.ipynb | 3 +-- .../dsl_static_type_checking/dsl_static_type_checking.ipynb | 2 +- samples/core/kubeflow_tf_serving/kubeflow_tf_serving.ipynb | 2 +- samples/core/multiple_outputs/multiple_outputs.ipynb | 2 +- samples/core/tfx-oss/TFX Example.ipynb | 2 +- 7 files changed, 7 insertions(+), 8 deletions(-) diff --git a/samples/core/ai_platform/ai_platform.ipynb b/samples/core/ai_platform/ai_platform.ipynb index 5ee5047a652..709a78cd6c6 100644 --- a/samples/core/ai_platform/ai_platform.ipynb +++ b/samples/core/ai_platform/ai_platform.ipynb @@ -31,7 +31,7 @@ "%%capture\n", "\n", "# Install the SDK (Uncomment the code if the SDK is not installed before)\n", - "!python3 -m pip install kfp --upgrade -q\n", + "!python3 -m pip install 'kfp>=0.1.31' --quiet\n", "!python3 -m pip install pandas --upgrade -q" ] }, diff --git a/samples/core/component_build/component_build.ipynb b/samples/core/component_build/component_build.ipynb index b39dcce05cf..221d5659b11 100644 --- a/samples/core/component_build/component_build.ipynb +++ b/samples/core/component_build/component_build.ipynb @@ -30,7 +30,7 @@ "outputs": [], "source": [ "# Install Pipeline SDK - This only needs to be ran once in the enviroment. \n", - "!pip3 install kfp --upgrade --quiet" + "!python3 -m pip install 'kfp>=0.1.31' --quiet\n" ] }, { diff --git a/samples/core/dataflow/dataflow.ipynb b/samples/core/dataflow/dataflow.ipynb index bdf8d6373f5..b17787bedbf 100644 --- a/samples/core/dataflow/dataflow.ipynb +++ b/samples/core/dataflow/dataflow.ipynb @@ -94,8 +94,7 @@ "metadata": {}, "outputs": [], "source": [ - "%%capture --no-stderr\n", - "!pip3 install kfp --upgrade" + "!python3 -m pip install 'kfp>=0.1.31' --quiet\n" ] }, { diff --git a/samples/core/dsl_static_type_checking/dsl_static_type_checking.ipynb b/samples/core/dsl_static_type_checking/dsl_static_type_checking.ipynb index 64ef3b6bb9c..32cc93a4bd0 100644 --- a/samples/core/dsl_static_type_checking/dsl_static_type_checking.ipynb +++ b/samples/core/dsl_static_type_checking/dsl_static_type_checking.ipynb @@ -162,7 +162,7 @@ } ], "source": [ - "!pip3 install kfp --upgrade" + "!python3 -m pip install 'kfp>=0.1.31' --quiet\n" ] }, { diff --git a/samples/core/kubeflow_tf_serving/kubeflow_tf_serving.ipynb b/samples/core/kubeflow_tf_serving/kubeflow_tf_serving.ipynb index 64c551207bf..96c86047ca5 100644 --- a/samples/core/kubeflow_tf_serving/kubeflow_tf_serving.ipynb +++ b/samples/core/kubeflow_tf_serving/kubeflow_tf_serving.ipynb @@ -132,7 +132,7 @@ ], "source": [ "# Install Pipeline SDK - This only needs to be ran once in the enviroment. 
\n", - "!pip3 install kfp --upgrade\n", + "!python3 -m pip install 'kfp>=0.1.31' --quiet\n", "!pip3 install tensorflow==1.14 --upgrade" ] }, diff --git a/samples/core/multiple_outputs/multiple_outputs.ipynb b/samples/core/multiple_outputs/multiple_outputs.ipynb index 469d0275e9a..db5ee0cc1f8 100644 --- a/samples/core/multiple_outputs/multiple_outputs.ipynb +++ b/samples/core/multiple_outputs/multiple_outputs.ipynb @@ -30,7 +30,7 @@ }, "outputs": [], "source": [ - "!pip install kfp --upgrade" + "!python3 -m pip install 'kfp>=0.1.31' --quiet\n" ] }, { diff --git a/samples/core/tfx-oss/TFX Example.ipynb b/samples/core/tfx-oss/TFX Example.ipynb index de50af35cad..770d35f56f0 100644 --- a/samples/core/tfx-oss/TFX Example.ipynb +++ b/samples/core/tfx-oss/TFX Example.ipynb @@ -18,7 +18,7 @@ "outputs": [], "source": [ "!pip3 install tfx==0.13.0 --upgrade\n", - "!pip3 install kfp --upgrade" + "!python3 -m pip install 'kfp>=0.1.31' --quiet\n" ] }, {
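Taken together, the series lets the sample-test launcher steer where runs land
without threading an `experiment_name` parameter through every notebook: the
launcher exports KF_PIPELINES_OVERRIDE_EXPERIMENT_NAME (which wins even over an
explicitly passed name and emits a warning when it changes it), while
KF_PIPELINES_DEFAULT_EXPERIMENT_NAME only fills in when no name is given, and
'Default' remains the final fallback. A minimal sketch of a self-submitting
script under the new behaviour (the pipeline, image, and experiment name below
are placeholders, not taken from the samples above):

    import kfp
    from kfp import dsl

    # The launcher would typically export this before running the script, e.g.:
    #   export KF_PIPELINES_OVERRIDE_EXPERIMENT_NAME=my-sample-test

    @dsl.pipeline(name='Echo pipeline', description='Toy pipeline used only for illustration.')
    def echo_pipeline():
        dsl.ContainerOp(
            name='echo',
            image='alpine:3.10',
            command=['sh', '-c', 'echo hello'],
        )

    if __name__ == '__main__':
        # No experiment_name argument: the experiment is taken from the
        # environment variables, falling back to 'Default' when neither is set.
        kfp.Client().create_run_from_pipeline_func(echo_pipeline, arguments={})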