diff --git a/samples/contrib/arena-samples/standalonejob/standalone_pipeline.ipynb b/samples/contrib/arena-samples/standalonejob/standalone_pipeline.ipynb index 79cf6299020..1db4df1270b 100644 --- a/samples/contrib/arena-samples/standalonejob/standalone_pipeline.ipynb +++ b/samples/contrib/arena-samples/standalonejob/standalone_pipeline.ipynb @@ -208,29 +208,6 @@ "!pip3 install $KFP_ARENA_PACKAGE --upgrade" ] }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### Import the KubeFlow Pipeline library and define the client and experiment " - ] - }, - { - "cell_type": "code", - "execution_count": 5, - "metadata": {}, - "outputs": [], - "source": [ - "import kfp\n", - "\n", - "client = kfp.Client(KUBEFLOW_PIPELINE_LINK)\n", - "\n", - "try:\n", - " experiment_id = client.get_experiment(experiment_name=EXPERIMENT_NAME).id\n", - "except:\n", - " experiment_id = client.create_experiment(EXPERIMENT_NAME).id" - ] - }, { "cell_type": "markdown", "metadata": {}, @@ -246,7 +223,6 @@ "source": [ "import arena\n", "import kfp.dsl as dsl\n", - "import kfp.compiler as compiler\n", "\n", "@dsl.pipeline(\n", " name='pipeline to run jobs',\n", @@ -328,11 +304,15 @@ "dropout = \"0.8\"\n", "model_verison = \"1\"\n", "\n", - "compiler.Compiler().compile(sample_pipeline, 'standalone.tar.gz')\n", + "arguments = {\n", + " 'learning_rate': learning_rate,\n", + " 'dropout': dropout,\n", + " 'model_version': model_version,\n", + "}\n", "\n", - "run = client.run_pipeline(experiment_id, 'mnist', 'standalone.tar.gz', params={'learning_rate':learning_rate,\n", - " 'dropout':dropout,\n", - " 'model_version':model_version})\n", + "import kfp\n", + "client = kfp.Client(host=KUBEFLOW_PIPELINE_LINK)\n", + "run = client.create_run_from_pipeline_func(sample_pipeline, arguments=arguments).run_info\n", "\n", "print('The above run link is assuming you ran this cell on JupyterHub that is deployed on the same cluster. 
' +\n", " 'The actual run link is ' + KUBEFLOW_PIPELINE_LINK + '/#/runs/details/' + run.id)" @@ -362,7 +342,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.6.4" + "version": "3.6.8" } }, "nbformat": 4, diff --git a/samples/contrib/arena-samples/standalonejob/standalone_pipeline.py b/samples/contrib/arena-samples/standalonejob/standalone_pipeline.py index 096de1cc3e2..a317dbacd71 100644 --- a/samples/contrib/arena-samples/standalonejob/standalone_pipeline.py +++ b/samples/contrib/arena-samples/standalonejob/standalone_pipeline.py @@ -77,19 +77,15 @@ def sample_pipeline(learning_rate='0.01', dropout = FLAGS.dropout learning_rate = FLAGS.learning_rate commit = FLAGS.commit + + arguments = { + 'learning_rate': learning_rate, + 'dropout': dropout, + 'model_version': model_version, + 'commit': commit, + } - EXPERIMENT_NAME="mnist" - RUN_ID="run" KFP_SERVICE="ml-pipeline.kubeflow.svc.cluster.local:8888" - import kfp.compiler as compiler - compiler.Compiler().compile(sample_pipeline, __file__ + '.tar.gz') client = kfp.Client(host=KFP_SERVICE) - try: - experiment_id = client.get_experiment(experiment_name=EXPERIMENT_NAME).id - except: - experiment_id = client.create_experiment(EXPERIMENT_NAME).id - run = client.run_pipeline(experiment_id, RUN_ID, __file__ + '.tar.gz', - params={'learning_rate':learning_rate, - 'dropout':dropout, - 'model_version':model_version, - 'commit':commit}) \ No newline at end of file + + client.create_run_from_pipeline_func(sample_pipeline, arguments=arguments) diff --git a/samples/contrib/ibm-samples/ffdl-seldon/ffdl_pipeline.ipynb b/samples/contrib/ibm-samples/ffdl-seldon/ffdl_pipeline.ipynb index a5a470083b2..3dcf85e772f 100644 --- a/samples/contrib/ibm-samples/ffdl-seldon/ffdl_pipeline.ipynb +++ b/samples/contrib/ibm-samples/ffdl-seldon/ffdl_pipeline.ipynb @@ -7,7 +7,6 @@ "# Train and deploy with FfDL and Seldon demo\n" ] }, - { "cell_type": "markdown", "metadata": {}, @@ -15,7 +14,7 @@ "##### A simple IBM OSS pipeline demonstrates how to train a model using Fabric for Deep Learning and then deploy it with Seldon.\n", "\n" ] - }, + }, { "cell_type": "markdown", "metadata": {}, @@ -33,7 +32,7 @@ "source": [ "### Define the necessary environment variables and install the KubeFlow Pipeline SDK\n", "We assume this notebook kernel has access to Python's site-packages and is in Python3.\n", - "\n", + "\n" ] }, { @@ -94,9 +93,7 @@ "client = kfp.Client()\n", "\n", "\n", - "EXPERIMENT_NAME = 'FfDL-Seldon Experiments'\n", - "\n", - "exp = client.create_experiment(name=EXPERIMENT_NAME)\n" + "EXPERIMENT_NAME = 'FfDL-Seldon Experiments'" ] }, { @@ -182,10 +179,8 @@ " 'model-class-file': 'gender_classification.py'}\n", "\n", "\n", - "compiler.Compiler().compile(ffdlPipeline, 'ffdl-pipeline.tar.gz')\n", + "run = client.create_run_from_pipeline_func(ffdlPipeline, arguments=parameters, experiment_name=EXPERIMENT_NAME).run_info\n", "\n", - "run = client.run_pipeline(exp.id, 'ffdl-seldon-pipeline', 'ffdl-pipeline.tar.gz', \n", - " params=parameters)\n", "import IPython\n", "html = ('
<p id=\"link\"> Click <a href=\"%s/#/runs/details/%s\">here</a> to visit the run page.</p>
'\n", " % (client._get_url_prefix(), run.id))\n", @@ -202,9 +197,9 @@ ], "metadata": { "kernelspec": { - "display_name": "Python on Kubernetes", + "display_name": "Python 3", "language": "python", - "name": "python_kubernetes" + "name": "python3" }, "language_info": { "codemirror_mode": { diff --git a/samples/contrib/ibm-samples/openscale/README.md b/samples/contrib/ibm-samples/openscale/README.md index 00a0c847338..1cfe2a0a836 100644 --- a/samples/contrib/ibm-samples/openscale/README.md +++ b/samples/contrib/ibm-samples/openscale/README.md @@ -47,13 +47,10 @@ To access the credentials file, the user should provide a github access token an ```python import kfp.dsl as dsl import kfp.components as components -from kfp import compiler import kfp secret_name = 'aios-creds' configuration_op = components.load_component_from_url('https://raw.githubusercontent.com/kubeflow/pipelines/master/components/ibm-components/commons/config/component.yaml') -client = kfp.Client() -EXPERIMENT_NAME = 'create secret' -exp = client.create_experiment(name=EXPERIMENT_NAME) + @dsl.pipeline( name='create secret', description='' @@ -67,8 +64,8 @@ def secret_pipeline( url=CONFIG_FILE_URL, name=secret_name ) -compiler.Compiler().compile(secret_pipeline, 'secret_pipeline.tar.gz') -run = client.run_pipeline(exp.id, 'secret_pipeline', 'secret_pipeline.tar.gz') + +kfp.Client().create_run_from_pipeline_func(secret_pipeline, arguments={}) ``` ## Instructions diff --git a/samples/contrib/image-captioning-gcp/Image Captioning TF 2.0.ipynb b/samples/contrib/image-captioning-gcp/Image Captioning TF 2.0.ipynb index 4381501cac5..0d72d06b1bd 100644 --- a/samples/contrib/image-captioning-gcp/Image Captioning TF 2.0.ipynb +++ b/samples/contrib/image-captioning-gcp/Image Captioning TF 2.0.ipynb @@ -893,28 +893,6 @@ " use_gcp_secret('user-gcp-sa'))" ] }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "pipeline_filename = caption_pipeline.__name__ + '.pipeline.zip'\n", - "compiler.Compiler().compile(pipeline_func, pipeline_filename)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "scrolled": true - }, - "outputs": [], - "source": [ - "client = kfp.Client()\n", - "experiment = client.create_experiment(EXPERIMENT_NAME)" - ] - }, { "cell_type": "markdown", "metadata": {}, @@ -934,9 +912,8 @@ " 'num_examples': 100, # Small test to make sure pipeline functions properly\n", " 'training_batch_size': 16, # has to be smaller since only training on 80/100 examples \n", "}\n", - "run_name = caption_pipeline.__name__ + ' run'\n", - "run_result = client.run_pipeline(experiment.id, run_name, pipeline_filename,\n", - " params=arguments)" + "\n", + "kfp.Client().create_run_from_pipeline_func(pipeline, arguments=arguments, experiment_name=EXPERIMENT_NAME)" ] }, { diff --git a/samples/core/ai-platform/Chicago Crime Pipeline.ipynb b/samples/core/ai-platform/Chicago Crime Pipeline.ipynb index ca6efe3d416..9ad290b653a 100644 --- a/samples/core/ai-platform/Chicago Crime Pipeline.ipynb +++ b/samples/core/ai-platform/Chicago Crime Pipeline.ipynb @@ -45,7 +45,6 @@ "import json\n", "\n", "import kfp\n", - "import kfp.compiler as compiler\n", "import kfp.components as comp\n", "import kfp.dsl as dsl\n", "import kfp.gcp as gcp\n", @@ -254,24 +253,6 @@ "pipeline_func = pipeline" ] }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### Compile pipeline" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - 
"source": [ - "pipeline_filename = PIPELINE_FILENAME_PREFIX + '.pipeline.zip'\n", - "\n", - "compiler.Compiler().compile(pipeline_func, pipeline_filename)" - ] - }, { "cell_type": "markdown", "metadata": {}, @@ -285,19 +266,7 @@ "metadata": {}, "outputs": [], "source": [ - "# Specify pipeline argument values\n", - "arguments = {}\n", - "\n", - "# Get or create an experiment and submit a pipeline run\n", - "client = kfp.Client()\n", - "try:\n", - " experiment = client.get_experiment(experiment_name=EXPERIMENT_NAME)\n", - "except:\n", - " experiment = client.create_experiment(EXPERIMENT_NAME)\n", - "\n", - "# Submit a pipeline run\n", - "run_name = pipeline_func.__name__ + ' run'\n", - "run_result = client.run_pipeline(experiment.id, run_name, pipeline_filename, arguments)" + "kfp.Client().create_run_from_pipeline_func(pipeline, arguments={}, experiment_name=EXPERIMENT_NAME)" ] } ], @@ -317,18 +286,18 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.6.7" + "version": "3.6.8" }, "pycharm": { "stem_cell": { "cell_type": "raw", - "source": [], "metadata": { "collapsed": false - } + }, + "source": [] } } }, "nbformat": 4, "nbformat_minor": 2 -} \ No newline at end of file +} diff --git a/samples/core/dataflow/dataflow.ipynb b/samples/core/dataflow/dataflow.ipynb index e95085f1ff8..241ccbf0dff 100644 --- a/samples/core/dataflow/dataflow.ipynb +++ b/samples/core/dataflow/dataflow.ipynb @@ -302,6 +302,7 @@ "metadata": {}, "outputs": [], "source": [ + "import kfp\n", "import kfp.dsl as dsl\n", "import kfp.gcp as gcp\n", "import json\n", @@ -329,25 +330,6 @@ " wait_interval = wait_interval).apply(gcp.use_gcp_secret('user-gcp-sa'))" ] }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Compile the pipeline" - ] - }, - { - "cell_type": "code", - "execution_count": 6, - "metadata": {}, - "outputs": [], - "source": [ - "pipeline_func = pipeline\n", - "pipeline_filename = pipeline_func.__name__ + '.zip'\n", - "import kfp.compiler as compiler\n", - "compiler.Compiler().compile(pipeline_func, pipeline_filename)" - ] - }, { "cell_type": "markdown", "metadata": {}, @@ -386,17 +368,7 @@ } ], "source": [ - "#Specify pipeline argument values\n", - "arguments = {}\n", - "\n", - "#Get or create an experiment and submit a pipeline run\n", - "import kfp\n", - "client = kfp.Client()\n", - "experiment = client.create_experiment(experiment_name)\n", - "\n", - "#Submit a pipeline run\n", - "run_name = pipeline_func.__name__ + ' run'\n", - "run_result = client.run_pipeline(experiment.id, run_name, pipeline_filename, arguments)" + "kfp.Client().create_run_from_pipeline_func(pipeline, arguments={}, experiment_name=experiment_name)" ] }, { diff --git a/samples/core/kubeflow_tf_serving/kubeflow_tf_serving.ipynb b/samples/core/kubeflow_tf_serving/kubeflow_tf_serving.ipynb index 1c253755d15..d830a6507b8 100644 --- a/samples/core/kubeflow_tf_serving/kubeflow_tf_serving.ipynb +++ b/samples/core/kubeflow_tf_serving/kubeflow_tf_serving.ipynb @@ -190,22 +190,6 @@ "model_version_path = '%s/%s/%s' % (output,model_name,model_version)" ] }, - { - "cell_type": "code", - "execution_count": 31, - "metadata": {}, - "outputs": [], - "source": [ - "#Get or create an experiment and submit a pipeline run\n", - "import kfp\n", - "client = kfp.Client()\n", - "\n", - "try:\n", - " experiment = client.get_experiment(experiment_name=experiment_name)\n", - "except:\n", - " experiment = client.create_experiment(experiment_name)" - ] - }, { "cell_type": "markdown", "metadata": {}, 
@@ -314,16 +298,10 @@ ] }, { - "cell_type": "code", - "execution_count": 38, + "cell_type": "markdown", "metadata": {}, - "outputs": [], "source": [ - "pipeline_func = model_server\n", - "pipeline_filename = pipeline_func.__name__ + '.pipeline.zip'\n", - "\n", - "import kfp.compiler as compiler\n", - "compiler.Compiler().compile(pipeline_func, pipeline_filename)" + "Submit pipeline for execution on Kubeflow Pipelines cluster" ] }, { @@ -345,15 +323,9 @@ } ], "source": [ - "#Specify pipeline argument values\n", - "arguments = {}\n", - "\n", - "#Submit a pipeline run\n", - "run_name = pipeline_func.__name__ + ' run'\n", - "run_result = client.run_pipeline(experiment.id, run_name, pipeline_filename, arguments)\n", + "kfp.Client().create_run_from_pipeline_func(model_server, arguments={}, experiment_name=experiment_name)\n", "\n", - "#This link leads to the run information page. \n", - "#Note: There is a bug in JupyterLab that modifies the URL and makes the link stop working" + "#vvvvvvvvv This link leads to the run information page. (Note: There is a bug in JupyterLab that modifies the URL and makes the link stop working)" ] } ], diff --git a/samples/core/lightweight_component/lightweight_component.ipynb b/samples/core/lightweight_component/lightweight_component.ipynb index 79db4e6c165..b0465f152f9 100644 --- a/samples/core/lightweight_component/lightweight_component.ipynb +++ b/samples/core/lightweight_component/lightweight_component.ipynb @@ -48,6 +48,7 @@ "metadata": {}, "outputs": [], "source": [ + "import kfp\n", "import kfp.components as comp" ] }, @@ -225,25 +226,6 @@ " result_task = add_op(divmod_task.outputs['quotient'], c)" ] }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "#### Compile the pipeline" - ] - }, - { - "cell_type": "code", - "execution_count": 8, - "metadata": {}, - "outputs": [], - "source": [ - "pipeline_func = calc_pipeline\n", - "pipeline_filename = pipeline_func.__name__ + '.pipeline.zip'\n", - "import kfp.compiler as compiler\n", - "compiler.Compiler().compile(pipeline_func, pipeline_filename)" - ] - }, { "cell_type": "markdown", "metadata": {}, @@ -260,14 +242,8 @@ "#Specify pipeline argument values\n", "arguments = {'a': '7', 'b': '8'}\n", "\n", - "#Get or create an experiment and submit a pipeline run\n", - "import kfp\n", - "client = kfp.Client()\n", - "experiment = client.create_experiment(experiment_name)\n", - "\n", "#Submit a pipeline run\n", - "run_name = pipeline_func.__name__ + ' run'\n", - "run_result = client.run_pipeline(experiment.id, run_name, pipeline_filename, arguments)\n", + "kfp.Client().create_run_from_pipeline_func(calc_pipeline, arguments=arguments, experiment_name=experiment_name)\n", "\n", "#vvvvvvvvv This link leads to the run information page. 
(Note: There is a bug in JupyterLab that modifies the URL and makes the link stop working)" ] diff --git a/samples/core/multiple_outputs/Multiple outputs - basics.ipynb b/samples/core/multiple_outputs/Multiple outputs - basics.ipynb index d12137c4022..33c96800c67 100644 --- a/samples/core/multiple_outputs/Multiple outputs - basics.ipynb +++ b/samples/core/multiple_outputs/Multiple outputs - basics.ipynb @@ -142,16 +142,6 @@ " prod_sum_task2.outputs['sum'])" ] }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "pipeline_filename = 'multiple-outputs.pipelines.zip'\n", - "compiler.Compiler().compile(pipeline, pipeline_filename)" - ] - }, { "cell_type": "markdown", "metadata": {}, @@ -159,16 +149,6 @@ "### Run pipeline" ] }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "client = kfp.Client()\n", - "experiment = client.create_experiment(EXPERIMENT_NAME)" - ] - }, { "cell_type": "code", "execution_count": null, @@ -180,9 +160,7 @@ " 'b': 2.5,\n", " 'c': 3.0,\n", "}\n", - "run_name = 'multiple output run'\n", - "run_result = client.run_pipeline(experiment.id, run_name, pipeline_filename,\n", - " params=arguments)" + "run_result = kfp.Client().create_run_from_pipeline_func(pipeline, arguments=arguments, experiment_name=EXPERIMENT_NAME)" ] } ], diff --git a/samples/core/tfx-oss/TFX Example.ipynb b/samples/core/tfx-oss/TFX Example.ipynb index f98450ade3a..de50af35cad 100644 --- a/samples/core/tfx-oss/TFX Example.ipynb +++ b/samples/core/tfx-oss/TFX Example.ipynb @@ -268,7 +268,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "## Compile the pipeline and submit a run to the Kubeflow cluster" + "## Submit pipeline for execution on the Kubeflow cluster" ] }, { @@ -277,20 +277,8 @@ "metadata": {}, "outputs": [], "source": [ - "# Get or create a new experiment\n", "import kfp\n", - "client = kfp.Client()\n", - "experiment_name='TFX Examples'\n", - "try:\n", - " experiment_id = client.get_experiment(experiment_name=experiment_name).id\n", - "except:\n", - " experiment_id = client.create_experiment(experiment_name).id\n", - "\n", - "pipeline_filename = 'chicago_taxi_pipeline_kubeflow.tar.gz'\n", - "\n", - "#Submit a pipeline run\n", - "run_name = 'Run 1'\n", - "run_result = client.run_pipeline(experiment_id, run_name, pipeline_filename, {})\n" + "run_result = kfp.Client().create_run_from_pipeline_package('chicago_taxi_pipeline_kubeflow.tar.gz', arguments={})" ] }, {
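All of the hunks above adopt the same submission pattern, so a consolidated reference may be useful: a minimal, self-contained sketch of `Client.create_run_from_pipeline_func`. It is illustrative only and not taken from any of the samples — the pipeline name, container image, and experiment name are placeholders, and it assumes a KFP SDK version that exposes this method plus a reachable Pipelines API endpoint.

```python
import kfp
import kfp.dsl as dsl


@dsl.pipeline(
    name='echo',
    description='Minimal sketch of the one-call submission pattern.'
)
def echo_pipeline(message='hello world'):
    # A trivial single-step pipeline used only to demonstrate submission.
    dsl.ContainerOp(
        name='echo',
        image='library/bash:4.4.23',
        command=['sh', '-c'],
        arguments=['echo "%s"' % message],
    )


# Compiles the pipeline in memory, reuses (or creates) the experiment, and
# submits the run in a single call -- no intermediate .tar.gz is written.
client = kfp.Client()  # pass host=<pipelines endpoint> when running outside the cluster
result = client.create_run_from_pipeline_func(
    echo_pipeline,
    arguments={'message': 'hello'},
    experiment_name='Default',  # placeholder experiment name
)
print(result.run_info.id)
```

The single call compiles the pipeline function, looks up or creates the named experiment, and submits the run, which is what lets each sample drop its `compiler.Compiler().compile(...)` step, the packaged `.tar.gz` artifact, and the `get_experiment`/`create_experiment` try/except boilerplate removed in the hunks above.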