diff --git a/components/deprecated/dataflow/predict/component.yaml b/components/deprecated/dataflow/predict/component.yaml
index 6e829bcb893..cc408870fa7 100644
--- a/components/deprecated/dataflow/predict/component.yaml
+++ b/components/deprecated/dataflow/predict/component.yaml
@@ -16,7 +16,7 @@ outputs:
 - {name: MLPipeline UI metadata, type: UI metadata}
 implementation:
   container:
-    image: gcr.io/ml-pipeline/ml-pipeline-dataflow-tf-predict:151c5349f13bea9d626c988563c04c0a86210c21
+    image: gcr.io/ml-pipeline/ml-pipeline-dataflow-tf-predict:1449d08aeeeb47731d019ea046d90904d9c77953
     command: [python2, /ml/predict.py]
     args: [
       --data, {inputValue: Data file pattern},
diff --git a/components/deprecated/dataflow/tfdv/component.yaml b/components/deprecated/dataflow/tfdv/component.yaml
index b72a358fa73..fa7b177a02d 100644
--- a/components/deprecated/dataflow/tfdv/component.yaml
+++ b/components/deprecated/dataflow/tfdv/component.yaml
@@ -18,7 +18,7 @@ outputs:
 - {name: Validation result, type: String, description: Indicates whether anomalies were detected or not.}
 implementation:
   container:
-    image: gcr.io/ml-pipeline/ml-pipeline-dataflow-tfdv:151c5349f13bea9d626c988563c04c0a86210c21
+    image: gcr.io/ml-pipeline/ml-pipeline-dataflow-tfdv:1449d08aeeeb47731d019ea046d90904d9c77953
     command: [python2, /ml/validate.py]
     args: [
       --csv-data-for-inference, {inputValue: Inference data},
diff --git a/components/deprecated/dataflow/tfma/component.yaml b/components/deprecated/dataflow/tfma/component.yaml
index 978e97c7e2e..3ae70275f63 100644
--- a/components/deprecated/dataflow/tfma/component.yaml
+++ b/components/deprecated/dataflow/tfma/component.yaml
@@ -18,7 +18,7 @@ outputs:
 - {name: MLPipeline UI metadata, type: UI metadata}
 implementation:
   container:
-    image: gcr.io/ml-pipeline/ml-pipeline-dataflow-tfma:151c5349f13bea9d626c988563c04c0a86210c21
+    image: gcr.io/ml-pipeline/ml-pipeline-dataflow-tfma:1449d08aeeeb47731d019ea046d90904d9c77953
     command: [python2, /ml/model_analysis.py]
     args: [
       --model, {inputValue: Model},
diff --git a/components/deprecated/dataflow/tft/component.yaml b/components/deprecated/dataflow/tft/component.yaml
index c77ea6886ba..9ff690e42b4 100644
--- a/components/deprecated/dataflow/tft/component.yaml
+++ b/components/deprecated/dataflow/tft/component.yaml
@@ -12,7 +12,7 @@ outputs:
 - {name: Transformed data dir, type: GCSPath} # type: {GCSPath: {path_type: Directory}}
 implementation:
   container:
-    image: gcr.io/ml-pipeline/ml-pipeline-dataflow-tft:151c5349f13bea9d626c988563c04c0a86210c21
+    image: gcr.io/ml-pipeline/ml-pipeline-dataflow-tft:1449d08aeeeb47731d019ea046d90904d9c77953
     command: [python2, /ml/transform.py]
     args: [
       --train, {inputValue: Training data file pattern},
diff --git a/components/gcp/bigquery/query/README.md b/components/gcp/bigquery/query/README.md
index 8e9331f013a..649d7b7f470 100644
--- a/components/gcp/bigquery/query/README.md
+++ b/components/gcp/bigquery/query/README.md
@@ -89,7 +89,7 @@ KFP_PACKAGE = 'https://storage.googleapis.com/ml-pipeline/release/0.1.14/kfp.tar
 import kfp.components as comp
 
 bigquery_query_op = comp.load_component_from_url(
-    'https://raw.githubusercontent.com/kubeflow/pipelines/a97f1d0ad0e7b92203f35c5b0b9af3a314952e05/components/gcp/bigquery/query/component.yaml')
+    'https://raw.githubusercontent.com/kubeflow/pipelines/e7a021ed1da6b0ff21f7ba30422decbdcdda0c20/components/gcp/bigquery/query/component.yaml')
 help(bigquery_query_op)
 ```
 
diff --git a/components/gcp/bigquery/query/component.yaml b/components/gcp/bigquery/query/component.yaml
index d1945e73fbe..7f38746b315 100644
--- a/components/gcp/bigquery/query/component.yaml
+++ b/components/gcp/bigquery/query/component.yaml
@@ -59,7 +59,7 @@ outputs:
     type: UI metadata
 implementation:
   container:
-    image: gcr.io/ml-pipeline/ml-pipeline-gcp:151c5349f13bea9d626c988563c04c0a86210c21
+    image: gcr.io/ml-pipeline/ml-pipeline-gcp:1449d08aeeeb47731d019ea046d90904d9c77953
     args: [
       kfp_component.google.bigquery, query,
       --query, {inputValue: query},
diff --git a/components/gcp/bigquery/query/sample.ipynb b/components/gcp/bigquery/query/sample.ipynb
index 98b6e75ad07..1fe965d2f20 100644
--- a/components/gcp/bigquery/query/sample.ipynb
+++ b/components/gcp/bigquery/query/sample.ipynb
@@ -108,7 +108,7 @@
     "import kfp.components as comp\n",
     "\n",
     "bigquery_query_op = comp.load_component_from_url(\n",
-    "    'https://raw.githubusercontent.com/kubeflow/pipelines/a97f1d0ad0e7b92203f35c5b0b9af3a314952e05/components/gcp/bigquery/query/component.yaml')\n",
+    "    'https://raw.githubusercontent.com/kubeflow/pipelines/e7a021ed1da6b0ff21f7ba30422decbdcdda0c20/components/gcp/bigquery/query/component.yaml')\n",
     "help(bigquery_query_op)"
    ]
   },
diff --git a/components/gcp/dataflow/launch_python/README.md b/components/gcp/dataflow/launch_python/README.md
index 8279bd18ed7..a232a64f91e 100644
--- a/components/gcp/dataflow/launch_python/README.md
+++ b/components/gcp/dataflow/launch_python/README.md
@@ -77,7 +77,7 @@ KFP_PACKAGE = 'https://storage.googleapis.com/ml-pipeline/release/0.1.14/kfp.tar
 import kfp.components as comp
 
 dataflow_python_op = comp.load_component_from_url(
-    'https://raw.githubusercontent.com/kubeflow/pipelines/a97f1d0ad0e7b92203f35c5b0b9af3a314952e05/components/gcp/dataflow/launch_python/component.yaml')
+    'https://raw.githubusercontent.com/kubeflow/pipelines/e7a021ed1da6b0ff21f7ba30422decbdcdda0c20/components/gcp/dataflow/launch_python/component.yaml')
 help(dataflow_python_op)
 ```
 
diff --git a/components/gcp/dataflow/launch_python/component.yaml b/components/gcp/dataflow/launch_python/component.yaml
index 2c4b90fa468..f1b2cb6bacb 100644
--- a/components/gcp/dataflow/launch_python/component.yaml
+++ b/components/gcp/dataflow/launch_python/component.yaml
@@ -53,7 +53,7 @@ outputs:
     type: UI metadata
 implementation:
   container:
-    image: gcr.io/ml-pipeline/ml-pipeline-gcp:151c5349f13bea9d626c988563c04c0a86210c21
+    image: gcr.io/ml-pipeline/ml-pipeline-gcp:1449d08aeeeb47731d019ea046d90904d9c77953
     args: [
       kfp_component.google.dataflow, launch_python,
      --python_file_path, {inputValue: python_file_path},
diff --git a/components/gcp/dataflow/launch_python/sample.ipynb b/components/gcp/dataflow/launch_python/sample.ipynb
index db277bae853..62b52d6841c 100644
--- a/components/gcp/dataflow/launch_python/sample.ipynb
+++ b/components/gcp/dataflow/launch_python/sample.ipynb
@@ -95,7 +95,7 @@
     "import kfp.components as comp\n",
     "\n",
     "dataflow_python_op = comp.load_component_from_url(\n",
-    "    'https://raw.githubusercontent.com/kubeflow/pipelines/a97f1d0ad0e7b92203f35c5b0b9af3a314952e05/components/gcp/dataflow/launch_python/component.yaml')\n",
+    "    'https://raw.githubusercontent.com/kubeflow/pipelines/e7a021ed1da6b0ff21f7ba30422decbdcdda0c20/components/gcp/dataflow/launch_python/component.yaml')\n",
     "help(dataflow_python_op)"
    ]
   },
diff --git a/components/gcp/dataflow/launch_template/README.md b/components/gcp/dataflow/launch_template/README.md
index 92a0f9771f1..50cb754207a 100644
--- a/components/gcp/dataflow/launch_template/README.md
+++ b/components/gcp/dataflow/launch_template/README.md
@@ -67,7 +67,7 @@ KFP_PACKAGE = 'https://storage.googleapis.com/ml-pipeline/release/0.1.14/kfp.tar
 import kfp.components as comp
 
 dataflow_template_op = comp.load_component_from_url(
-    'https://raw.githubusercontent.com/kubeflow/pipelines/a97f1d0ad0e7b92203f35c5b0b9af3a314952e05/components/gcp/dataflow/launch_template/component.yaml')
+    'https://raw.githubusercontent.com/kubeflow/pipelines/e7a021ed1da6b0ff21f7ba30422decbdcdda0c20/components/gcp/dataflow/launch_template/component.yaml')
 help(dataflow_template_op)
 ```
 
diff --git a/components/gcp/dataflow/launch_template/component.yaml b/components/gcp/dataflow/launch_template/component.yaml
index 75ad52be9e7..1718ef6195e 100644
--- a/components/gcp/dataflow/launch_template/component.yaml
+++ b/components/gcp/dataflow/launch_template/component.yaml
@@ -63,7 +63,7 @@ outputs:
     type: UI metadata
 implementation:
   container:
-    image: gcr.io/ml-pipeline/ml-pipeline-gcp:151c5349f13bea9d626c988563c04c0a86210c21
+    image: gcr.io/ml-pipeline/ml-pipeline-gcp:1449d08aeeeb47731d019ea046d90904d9c77953
     args: [
       kfp_component.google.dataflow, launch_template,
       --project_id, {inputValue: project_id},
diff --git a/components/gcp/dataflow/launch_template/sample.ipynb b/components/gcp/dataflow/launch_template/sample.ipynb
index e5dd3eb1c0c..443a0d61135 100644
--- a/components/gcp/dataflow/launch_template/sample.ipynb
+++ b/components/gcp/dataflow/launch_template/sample.ipynb
@@ -85,7 +85,7 @@
     "import kfp.components as comp\n",
     "\n",
     "dataflow_template_op = comp.load_component_from_url(\n",
-    "    'https://raw.githubusercontent.com/kubeflow/pipelines/a97f1d0ad0e7b92203f35c5b0b9af3a314952e05/components/gcp/dataflow/launch_template/component.yaml')\n",
+    "    'https://raw.githubusercontent.com/kubeflow/pipelines/e7a021ed1da6b0ff21f7ba30422decbdcdda0c20/components/gcp/dataflow/launch_template/component.yaml')\n",
     "help(dataflow_template_op)"
    ]
   },
diff --git a/components/gcp/dataproc/create_cluster/README.md b/components/gcp/dataproc/create_cluster/README.md
index 94ceb6d7d85..ead4fc7ef0b 100644
--- a/components/gcp/dataproc/create_cluster/README.md
+++ b/components/gcp/dataproc/create_cluster/README.md
@@ -74,7 +74,7 @@ KFP_PACKAGE = 'https://storage.googleapis.com/ml-pipeline/release/0.1.14/kfp.tar
 import kfp.components as comp
 
 dataproc_create_cluster_op = comp.load_component_from_url(
-    'https://raw.githubusercontent.com/kubeflow/pipelines/a97f1d0ad0e7b92203f35c5b0b9af3a314952e05/components/gcp/dataproc/create_cluster/component.yaml')
+    'https://raw.githubusercontent.com/kubeflow/pipelines/e7a021ed1da6b0ff21f7ba30422decbdcdda0c20/components/gcp/dataproc/create_cluster/component.yaml')
 help(dataproc_create_cluster_op)
 ```
 
diff --git a/components/gcp/dataproc/create_cluster/component.yaml b/components/gcp/dataproc/create_cluster/component.yaml
index d5383dcbf5c..411f47c922a 100644
--- a/components/gcp/dataproc/create_cluster/component.yaml
+++ b/components/gcp/dataproc/create_cluster/component.yaml
@@ -70,7 +70,7 @@ outputs:
     type: UI metadata
 implementation:
   container:
-    image: gcr.io/ml-pipeline/ml-pipeline-gcp:151c5349f13bea9d626c988563c04c0a86210c21
+    image: gcr.io/ml-pipeline/ml-pipeline-gcp:1449d08aeeeb47731d019ea046d90904d9c77953
     args: [
       kfp_component.google.dataproc, create_cluster,
       --project_id, {inputValue: project_id},
diff --git a/components/gcp/dataproc/create_cluster/sample.ipynb b/components/gcp/dataproc/create_cluster/sample.ipynb
index 5c22e8731b8..2f8753b4acb 100644
--- a/components/gcp/dataproc/create_cluster/sample.ipynb
+++ b/components/gcp/dataproc/create_cluster/sample.ipynb
@@ -92,7 +92,7 @@
     "import kfp.components as comp\n",
     "\n",
     "dataproc_create_cluster_op = comp.load_component_from_url(\n",
-    "    'https://raw.githubusercontent.com/kubeflow/pipelines/a97f1d0ad0e7b92203f35c5b0b9af3a314952e05/components/gcp/dataproc/create_cluster/component.yaml')\n",
+    "    'https://raw.githubusercontent.com/kubeflow/pipelines/e7a021ed1da6b0ff21f7ba30422decbdcdda0c20/components/gcp/dataproc/create_cluster/component.yaml')\n",
     "help(dataproc_create_cluster_op)"
    ]
   },
diff --git a/components/gcp/dataproc/delete_cluster/README.md b/components/gcp/dataproc/delete_cluster/README.md
index c9167e81ea9..fcedd643931 100644
--- a/components/gcp/dataproc/delete_cluster/README.md
+++ b/components/gcp/dataproc/delete_cluster/README.md
@@ -56,7 +56,7 @@ KFP_PACKAGE = 'https://storage.googleapis.com/ml-pipeline/release/0.1.14/kfp.tar
 import kfp.components as comp
 
 dataproc_delete_cluster_op = comp.load_component_from_url(
-    'https://raw.githubusercontent.com/kubeflow/pipelines/a97f1d0ad0e7b92203f35c5b0b9af3a314952e05/components/gcp/dataproc/delete_cluster/component.yaml')
+    'https://raw.githubusercontent.com/kubeflow/pipelines/e7a021ed1da6b0ff21f7ba30422decbdcdda0c20/components/gcp/dataproc/delete_cluster/component.yaml')
 help(dataproc_delete_cluster_op)
 ```
 
diff --git a/components/gcp/dataproc/delete_cluster/component.yaml b/components/gcp/dataproc/delete_cluster/component.yaml
index 15654a8f24e..eb96cd820c1 100644
--- a/components/gcp/dataproc/delete_cluster/component.yaml
+++ b/components/gcp/dataproc/delete_cluster/component.yaml
@@ -36,7 +36,7 @@ inputs:
     type: Integer
 implementation:
   container:
-    image: gcr.io/ml-pipeline/ml-pipeline-gcp:151c5349f13bea9d626c988563c04c0a86210c21
+    image: gcr.io/ml-pipeline/ml-pipeline-gcp:1449d08aeeeb47731d019ea046d90904d9c77953
     args: [
       kfp_component.google.dataproc, delete_cluster,
       --project_id, {inputValue: project_id},
diff --git a/components/gcp/dataproc/delete_cluster/sample.ipynb b/components/gcp/dataproc/delete_cluster/sample.ipynb
index db162b43eb4..accf0ac5ccd 100644
--- a/components/gcp/dataproc/delete_cluster/sample.ipynb
+++ b/components/gcp/dataproc/delete_cluster/sample.ipynb
@@ -75,7 +75,7 @@
     "import kfp.components as comp\n",
     "\n",
     "dataproc_delete_cluster_op = comp.load_component_from_url(\n",
-    "    'https://raw.githubusercontent.com/kubeflow/pipelines/a97f1d0ad0e7b92203f35c5b0b9af3a314952e05/components/gcp/dataproc/delete_cluster/component.yaml')\n",
+    "    'https://raw.githubusercontent.com/kubeflow/pipelines/e7a021ed1da6b0ff21f7ba30422decbdcdda0c20/components/gcp/dataproc/delete_cluster/component.yaml')\n",
     "help(dataproc_delete_cluster_op)"
    ]
   },
diff --git a/components/gcp/dataproc/submit_hadoop_job/README.md b/components/gcp/dataproc/submit_hadoop_job/README.md
index 23487186c16..fcccf4d9b6e 100644
--- a/components/gcp/dataproc/submit_hadoop_job/README.md
+++ b/components/gcp/dataproc/submit_hadoop_job/README.md
@@ -72,7 +72,7 @@ KFP_PACKAGE = 'https://storage.googleapis.com/ml-pipeline/release/0.1.14/kfp.tar
 import kfp.components as comp
 
 dataproc_submit_hadoop_job_op = comp.load_component_from_url(
-    'https://raw.githubusercontent.com/kubeflow/pipelines/a97f1d0ad0e7b92203f35c5b0b9af3a314952e05/components/gcp/dataproc/submit_hadoop_job/component.yaml')
+    'https://raw.githubusercontent.com/kubeflow/pipelines/e7a021ed1da6b0ff21f7ba30422decbdcdda0c20/components/gcp/dataproc/submit_hadoop_job/component.yaml')
 help(dataproc_submit_hadoop_job_op)
 ```
 
diff --git a/components/gcp/dataproc/submit_hadoop_job/component.yaml b/components/gcp/dataproc/submit_hadoop_job/component.yaml
index 8dfcc3d56d8..029998e9e5d 100644
--- a/components/gcp/dataproc/submit_hadoop_job/component.yaml
+++ b/components/gcp/dataproc/submit_hadoop_job/component.yaml
@@ -80,7 +80,7 @@ outputs:
     type: UI metadata
 implementation:
   container:
-    image: gcr.io/ml-pipeline/ml-pipeline-gcp:151c5349f13bea9d626c988563c04c0a86210c21
+    image: gcr.io/ml-pipeline/ml-pipeline-gcp:1449d08aeeeb47731d019ea046d90904d9c77953
     args: [
       kfp_component.google.dataproc, submit_hadoop_job,
       --project_id, {inputValue: project_id},
diff --git a/components/gcp/dataproc/submit_hadoop_job/sample.ipynb b/components/gcp/dataproc/submit_hadoop_job/sample.ipynb
index 7f3bdf98e2d..fdf94908fb5 100644
--- a/components/gcp/dataproc/submit_hadoop_job/sample.ipynb
+++ b/components/gcp/dataproc/submit_hadoop_job/sample.ipynb
@@ -90,7 +90,7 @@
     "import kfp.components as comp\n",
     "\n",
     "dataproc_submit_hadoop_job_op = comp.load_component_from_url(\n",
-    "    'https://raw.githubusercontent.com/kubeflow/pipelines/a97f1d0ad0e7b92203f35c5b0b9af3a314952e05/components/gcp/dataproc/submit_hadoop_job/component.yaml')\n",
+    "    'https://raw.githubusercontent.com/kubeflow/pipelines/e7a021ed1da6b0ff21f7ba30422decbdcdda0c20/components/gcp/dataproc/submit_hadoop_job/component.yaml')\n",
     "help(dataproc_submit_hadoop_job_op)"
    ]
   },
diff --git a/components/gcp/dataproc/submit_hive_job/README.md b/components/gcp/dataproc/submit_hive_job/README.md
index 5d55816e3b0..8b2c303a903 100644
--- a/components/gcp/dataproc/submit_hive_job/README.md
+++ b/components/gcp/dataproc/submit_hive_job/README.md
@@ -63,7 +63,7 @@ KFP_PACKAGE = 'https://storage.googleapis.com/ml-pipeline/release/0.1.14/kfp.tar
 import kfp.components as comp
 
 dataproc_submit_hive_job_op = comp.load_component_from_url(
-    'https://raw.githubusercontent.com/kubeflow/pipelines/a97f1d0ad0e7b92203f35c5b0b9af3a314952e05/components/gcp/dataproc/submit_hive_job/component.yaml')
+    'https://raw.githubusercontent.com/kubeflow/pipelines/e7a021ed1da6b0ff21f7ba30422decbdcdda0c20/components/gcp/dataproc/submit_hive_job/component.yaml')
 help(dataproc_submit_hive_job_op)
 ```
 
diff --git a/components/gcp/dataproc/submit_hive_job/component.yaml b/components/gcp/dataproc/submit_hive_job/component.yaml
index c0ef297fc7e..8ec2d46743e 100644
--- a/components/gcp/dataproc/submit_hive_job/component.yaml
+++ b/components/gcp/dataproc/submit_hive_job/component.yaml
@@ -75,7 +75,7 @@ outputs:
     type: UI metadata
 implementation:
   container:
-    image: gcr.io/ml-pipeline/ml-pipeline-gcp:151c5349f13bea9d626c988563c04c0a86210c21
+    image: gcr.io/ml-pipeline/ml-pipeline-gcp:1449d08aeeeb47731d019ea046d90904d9c77953
     args: [
       kfp_component.google.dataproc, submit_hive_job,
       --project_id, {inputValue: project_id},
diff --git a/components/gcp/dataproc/submit_hive_job/sample.ipynb b/components/gcp/dataproc/submit_hive_job/sample.ipynb
index 81c8f46225e..1fd32074e52 100644
--- a/components/gcp/dataproc/submit_hive_job/sample.ipynb
+++ b/components/gcp/dataproc/submit_hive_job/sample.ipynb
@@ -81,7 +81,7 @@
     "import kfp.components as comp\n",
     "\n",
     "dataproc_submit_hive_job_op = comp.load_component_from_url(\n",
-    "    'https://raw.githubusercontent.com/kubeflow/pipelines/a97f1d0ad0e7b92203f35c5b0b9af3a314952e05/components/gcp/dataproc/submit_hive_job/component.yaml')\n",
+    "    'https://raw.githubusercontent.com/kubeflow/pipelines/e7a021ed1da6b0ff21f7ba30422decbdcdda0c20/components/gcp/dataproc/submit_hive_job/component.yaml')\n",
     "help(dataproc_submit_hive_job_op)"
    ]
   },
diff --git a/components/gcp/dataproc/submit_pig_job/README.md b/components/gcp/dataproc/submit_pig_job/README.md
index 10800068599..049658c59be 100644
--- a/components/gcp/dataproc/submit_pig_job/README.md
+++ b/components/gcp/dataproc/submit_pig_job/README.md
@@ -66,7 +66,7 @@ KFP_PACKAGE = 'https://storage.googleapis.com/ml-pipeline/release/0.1.14/kfp.tar
 import kfp.components as comp
 
 dataproc_submit_pig_job_op = comp.load_component_from_url(
-    'https://raw.githubusercontent.com/kubeflow/pipelines/a97f1d0ad0e7b92203f35c5b0b9af3a314952e05/components/gcp/dataproc/submit_pig_job/component.yaml')
+    'https://raw.githubusercontent.com/kubeflow/pipelines/e7a021ed1da6b0ff21f7ba30422decbdcdda0c20/components/gcp/dataproc/submit_pig_job/component.yaml')
 help(dataproc_submit_pig_job_op)
 ```
 
diff --git a/components/gcp/dataproc/submit_pig_job/component.yaml b/components/gcp/dataproc/submit_pig_job/component.yaml
index 76ac296156f..ca2f0ef7e9f 100644
--- a/components/gcp/dataproc/submit_pig_job/component.yaml
+++ b/components/gcp/dataproc/submit_pig_job/component.yaml
@@ -75,7 +75,7 @@ outputs:
     type: UI metadata
 implementation:
   container:
-    image: gcr.io/ml-pipeline/ml-pipeline-gcp:151c5349f13bea9d626c988563c04c0a86210c21
+    image: gcr.io/ml-pipeline/ml-pipeline-gcp:1449d08aeeeb47731d019ea046d90904d9c77953
     args: [
       kfp_component.google.dataproc, submit_pig_job,
       --project_id, {inputValue: project_id},
diff --git a/components/gcp/dataproc/submit_pig_job/sample.ipynb b/components/gcp/dataproc/submit_pig_job/sample.ipynb
index 000c798d5d8..0db85b49e33 100644
--- a/components/gcp/dataproc/submit_pig_job/sample.ipynb
+++ b/components/gcp/dataproc/submit_pig_job/sample.ipynb
@@ -84,7 +84,7 @@
     "import kfp.components as comp\n",
     "\n",
     "dataproc_submit_pig_job_op = comp.load_component_from_url(\n",
-    "    'https://raw.githubusercontent.com/kubeflow/pipelines/a97f1d0ad0e7b92203f35c5b0b9af3a314952e05/components/gcp/dataproc/submit_pig_job/component.yaml')\n",
+    "    'https://raw.githubusercontent.com/kubeflow/pipelines/e7a021ed1da6b0ff21f7ba30422decbdcdda0c20/components/gcp/dataproc/submit_pig_job/component.yaml')\n",
     "help(dataproc_submit_pig_job_op)"
    ]
   },
diff --git a/components/gcp/dataproc/submit_pyspark_job/README.md b/components/gcp/dataproc/submit_pyspark_job/README.md
index ab550f7a7c1..6a89354d444 100644
--- a/components/gcp/dataproc/submit_pyspark_job/README.md
+++ b/components/gcp/dataproc/submit_pyspark_job/README.md
@@ -67,7 +67,7 @@ KFP_PACKAGE = 'https://storage.googleapis.com/ml-pipeline/release/0.1.14/kfp.tar
 import kfp.components as comp
 
 dataproc_submit_pyspark_job_op = comp.load_component_from_url(
-    'https://raw.githubusercontent.com/kubeflow/pipelines/a97f1d0ad0e7b92203f35c5b0b9af3a314952e05/components/gcp/dataproc/submit_pyspark_job/component.yaml')
+    'https://raw.githubusercontent.com/kubeflow/pipelines/e7a021ed1da6b0ff21f7ba30422decbdcdda0c20/components/gcp/dataproc/submit_pyspark_job/component.yaml')
 help(dataproc_submit_pyspark_job_op)
 ```
 
diff --git a/components/gcp/dataproc/submit_pyspark_job/component.yaml b/components/gcp/dataproc/submit_pyspark_job/component.yaml
index 995693ba1f7..d317993f1e7 100644
--- a/components/gcp/dataproc/submit_pyspark_job/component.yaml
+++ b/components/gcp/dataproc/submit_pyspark_job/component.yaml
@@ -69,7 +69,7 @@ outputs:
     type: UI metadata
 implementation:
   container:
-    image: gcr.io/ml-pipeline/ml-pipeline-gcp:151c5349f13bea9d626c988563c04c0a86210c21
+    image: gcr.io/ml-pipeline/ml-pipeline-gcp:1449d08aeeeb47731d019ea046d90904d9c77953
     args: [
       kfp_component.google.dataproc, submit_pyspark_job,
       --project_id, {inputValue: project_id},
diff --git a/components/gcp/dataproc/submit_pyspark_job/sample.ipynb b/components/gcp/dataproc/submit_pyspark_job/sample.ipynb
index 0500ee12f9b..47fa5fbfbfa 100644
--- a/components/gcp/dataproc/submit_pyspark_job/sample.ipynb
+++ b/components/gcp/dataproc/submit_pyspark_job/sample.ipynb
@@ -86,7 +86,7 @@
     "import kfp.components as comp\n",
     "\n",
     "dataproc_submit_pyspark_job_op = comp.load_component_from_url(\n",
-    "    'https://raw.githubusercontent.com/kubeflow/pipelines/a97f1d0ad0e7b92203f35c5b0b9af3a314952e05/components/gcp/dataproc/submit_pyspark_job/component.yaml')\n",
+    "    'https://raw.githubusercontent.com/kubeflow/pipelines/e7a021ed1da6b0ff21f7ba30422decbdcdda0c20/components/gcp/dataproc/submit_pyspark_job/component.yaml')\n",
     "help(dataproc_submit_pyspark_job_op)"
    ]
   },
diff --git a/components/gcp/dataproc/submit_spark_job/README.md b/components/gcp/dataproc/submit_spark_job/README.md
index 742384c807a..e026d88ee3b 100644
--- a/components/gcp/dataproc/submit_spark_job/README.md
+++ b/components/gcp/dataproc/submit_spark_job/README.md
@@ -80,7 +80,7 @@ KFP_PACKAGE = 'https://storage.googleapis.com/ml-pipeline/release/0.1.14/kfp.tar
 import kfp.components as comp
 
 dataproc_submit_spark_job_op = comp.load_component_from_url(
-    'https://raw.githubusercontent.com/kubeflow/pipelines/a97f1d0ad0e7b92203f35c5b0b9af3a314952e05/components/gcp/dataproc/submit_spark_job/component.yaml')
+    'https://raw.githubusercontent.com/kubeflow/pipelines/e7a021ed1da6b0ff21f7ba30422decbdcdda0c20/components/gcp/dataproc/submit_spark_job/component.yaml')
 help(dataproc_submit_spark_job_op)
 ```
 
diff --git a/components/gcp/dataproc/submit_spark_job/component.yaml b/components/gcp/dataproc/submit_spark_job/component.yaml
index ea94cbffdc1..da167b848b0 100644
--- a/components/gcp/dataproc/submit_spark_job/component.yaml
+++ b/components/gcp/dataproc/submit_spark_job/component.yaml
@@ -76,7 +76,7 @@ outputs:
     type: UI metadata
 implementation:
   container:
-    image: gcr.io/ml-pipeline/ml-pipeline-gcp:151c5349f13bea9d626c988563c04c0a86210c21
+    image: gcr.io/ml-pipeline/ml-pipeline-gcp:1449d08aeeeb47731d019ea046d90904d9c77953
     args: [
       kfp_component.google.dataproc, submit_spark_job,
       --project_id, {inputValue: project_id},
diff --git a/components/gcp/dataproc/submit_spark_job/sample.ipynb b/components/gcp/dataproc/submit_spark_job/sample.ipynb
index b4344e3fe16..35b5e11c791 100644
--- a/components/gcp/dataproc/submit_spark_job/sample.ipynb
+++ b/components/gcp/dataproc/submit_spark_job/sample.ipynb
@@ -99,7 +99,7 @@
     "import kfp.components as comp\n",
     "\n",
     "dataproc_submit_spark_job_op = comp.load_component_from_url(\n",
-    "    'https://raw.githubusercontent.com/kubeflow/pipelines/a97f1d0ad0e7b92203f35c5b0b9af3a314952e05/components/gcp/dataproc/submit_spark_job/component.yaml')\n",
+    "    'https://raw.githubusercontent.com/kubeflow/pipelines/e7a021ed1da6b0ff21f7ba30422decbdcdda0c20/components/gcp/dataproc/submit_spark_job/component.yaml')\n",
     "help(dataproc_submit_spark_job_op)"
    ]
   },
diff --git a/components/gcp/dataproc/submit_sparksql_job/README.md b/components/gcp/dataproc/submit_sparksql_job/README.md
index f8ad799e700..3d14a63c705 100644
--- a/components/gcp/dataproc/submit_sparksql_job/README.md
+++ b/components/gcp/dataproc/submit_sparksql_job/README.md
@@ -62,7 +62,7 @@ KFP_PACKAGE = 'https://storage.googleapis.com/ml-pipeline/release/0.1.14/kfp.tar
 import kfp.components as comp
 
 dataproc_submit_sparksql_job_op = comp.load_component_from_url(
-    'https://raw.githubusercontent.com/kubeflow/pipelines/a97f1d0ad0e7b92203f35c5b0b9af3a314952e05/components/gcp/dataproc/submit_sparksql_job/component.yaml')
+    'https://raw.githubusercontent.com/kubeflow/pipelines/e7a021ed1da6b0ff21f7ba30422decbdcdda0c20/components/gcp/dataproc/submit_sparksql_job/component.yaml')
 help(dataproc_submit_sparksql_job_op)
 ```
 
diff --git a/components/gcp/dataproc/submit_sparksql_job/component.yaml b/components/gcp/dataproc/submit_sparksql_job/component.yaml
index f91151ef75c..00e01a65c98 100644
--- a/components/gcp/dataproc/submit_sparksql_job/component.yaml
+++ b/components/gcp/dataproc/submit_sparksql_job/component.yaml
@@ -75,7 +75,7 @@ outputs:
     type: UI metadata
 implementation:
   container:
-    image: gcr.io/ml-pipeline/ml-pipeline-gcp:151c5349f13bea9d626c988563c04c0a86210c21
+    image: gcr.io/ml-pipeline/ml-pipeline-gcp:1449d08aeeeb47731d019ea046d90904d9c77953
     args: [
       kfp_component.google.dataproc, submit_sparksql_job,
       --project_id, {inputValue: project_id},
diff --git a/components/gcp/dataproc/submit_sparksql_job/sample.ipynb b/components/gcp/dataproc/submit_sparksql_job/sample.ipynb
index bfdd7e83a08..1d4fbb8d7fe 100644
--- a/components/gcp/dataproc/submit_sparksql_job/sample.ipynb
+++ b/components/gcp/dataproc/submit_sparksql_job/sample.ipynb
@@ -81,7 +81,7 @@
     "import kfp.components as comp\n",
     "\n",
     "dataproc_submit_sparksql_job_op = comp.load_component_from_url(\n",
-    "    'https://raw.githubusercontent.com/kubeflow/pipelines/a97f1d0ad0e7b92203f35c5b0b9af3a314952e05/components/gcp/dataproc/submit_sparksql_job/component.yaml')\n",
+    "    'https://raw.githubusercontent.com/kubeflow/pipelines/e7a021ed1da6b0ff21f7ba30422decbdcdda0c20/components/gcp/dataproc/submit_sparksql_job/component.yaml')\n",
     "help(dataproc_submit_sparksql_job_op)"
    ]
   },
diff --git a/components/gcp/ml_engine/batch_predict/README.md b/components/gcp/ml_engine/batch_predict/README.md
index 6020191e4fd..418bcba3fdb 100644
--- a/components/gcp/ml_engine/batch_predict/README.md
+++ b/components/gcp/ml_engine/batch_predict/README.md
@@ -94,7 +94,7 @@ KFP_PACKAGE = 'https://storage.googleapis.com/ml-pipeline/release/0.1.14/kfp.tar
 import kfp.components as comp
 
 mlengine_batch_predict_op = comp.load_component_from_url(
-    'https://raw.githubusercontent.com/kubeflow/pipelines/a97f1d0ad0e7b92203f35c5b0b9af3a314952e05/components/gcp/ml_engine/batch_predict/component.yaml')
+    'https://raw.githubusercontent.com/kubeflow/pipelines/e7a021ed1da6b0ff21f7ba30422decbdcdda0c20/components/gcp/ml_engine/batch_predict/component.yaml')
 help(mlengine_batch_predict_op)
 ```
 
diff --git a/components/gcp/ml_engine/batch_predict/component.yaml b/components/gcp/ml_engine/batch_predict/component.yaml
index de3ca6f166f..74eca30e1ed 100644
--- a/components/gcp/ml_engine/batch_predict/component.yaml
+++ b/components/gcp/ml_engine/batch_predict/component.yaml
@@ -69,7 +69,7 @@ outputs:
     type: UI metadata
 implementation:
   container:
-    image: gcr.io/ml-pipeline/ml-pipeline-gcp:151c5349f13bea9d626c988563c04c0a86210c21
+    image: gcr.io/ml-pipeline/ml-pipeline-gcp:1449d08aeeeb47731d019ea046d90904d9c77953
     args: [
       kfp_component.google.ml_engine, batch_predict,
       --project_id, {inputValue: project_id},
diff --git a/components/gcp/ml_engine/batch_predict/sample.ipynb b/components/gcp/ml_engine/batch_predict/sample.ipynb
index e8de2f10656..230423dde37 100644
--- a/components/gcp/ml_engine/batch_predict/sample.ipynb
+++ b/components/gcp/ml_engine/batch_predict/sample.ipynb
@@ -112,7 +112,7 @@
     "import kfp.components as comp\n",
     "\n",
     "mlengine_batch_predict_op = comp.load_component_from_url(\n",
-    "    'https://raw.githubusercontent.com/kubeflow/pipelines/a97f1d0ad0e7b92203f35c5b0b9af3a314952e05/components/gcp/ml_engine/batch_predict/component.yaml')\n",
+    "    'https://raw.githubusercontent.com/kubeflow/pipelines/e7a021ed1da6b0ff21f7ba30422decbdcdda0c20/components/gcp/ml_engine/batch_predict/component.yaml')\n",
     "help(mlengine_batch_predict_op)"
    ]
   },
diff --git a/components/gcp/ml_engine/deploy/README.md b/components/gcp/ml_engine/deploy/README.md
index c41fabe8bd8..e4f10a7b0d6 100644
--- a/components/gcp/ml_engine/deploy/README.md
+++ b/components/gcp/ml_engine/deploy/README.md
@@ -110,7 +110,7 @@ KFP_PACKAGE = 'https://storage.googleapis.com/ml-pipeline/release/0.1.14/kfp.tar
 import kfp.components as comp
 
 mlengine_deploy_op = comp.load_component_from_url(
-    'https://raw.githubusercontent.com/kubeflow/pipelines/a97f1d0ad0e7b92203f35c5b0b9af3a314952e05/components/gcp/ml_engine/deploy/component.yaml')
+    'https://raw.githubusercontent.com/kubeflow/pipelines/e7a021ed1da6b0ff21f7ba30422decbdcdda0c20/components/gcp/ml_engine/deploy/component.yaml')
 help(mlengine_deploy_op)
 ```
 
diff --git a/components/gcp/ml_engine/deploy/component.yaml b/components/gcp/ml_engine/deploy/component.yaml
index e6d07f943f8..dfe81adb806 100644
--- a/components/gcp/ml_engine/deploy/component.yaml
+++ b/components/gcp/ml_engine/deploy/component.yaml
@@ -95,7 +95,7 @@ outputs:
     type: UI metadata
 implementation:
   container:
-    image: gcr.io/ml-pipeline/ml-pipeline-gcp:151c5349f13bea9d626c988563c04c0a86210c21
+    image: gcr.io/ml-pipeline/ml-pipeline-gcp:1449d08aeeeb47731d019ea046d90904d9c77953
     args: [
       kfp_component.google.ml_engine, deploy,
       --model_uri, {inputValue: model_uri},
diff --git a/components/gcp/ml_engine/deploy/sample.ipynb b/components/gcp/ml_engine/deploy/sample.ipynb
index 2037bd2416b..c39d2e40393 100644
--- a/components/gcp/ml_engine/deploy/sample.ipynb
+++ b/components/gcp/ml_engine/deploy/sample.ipynb
@@ -128,7 +128,7 @@
     "import kfp.components as comp\n",
     "\n",
     "mlengine_deploy_op = comp.load_component_from_url(\n",
-    "    'https://raw.githubusercontent.com/kubeflow/pipelines/a97f1d0ad0e7b92203f35c5b0b9af3a314952e05/components/gcp/ml_engine/deploy/component.yaml')\n",
+    "    'https://raw.githubusercontent.com/kubeflow/pipelines/e7a021ed1da6b0ff21f7ba30422decbdcdda0c20/components/gcp/ml_engine/deploy/component.yaml')\n",
     "help(mlengine_deploy_op)"
    ]
   },
diff --git a/components/gcp/ml_engine/train/README.md b/components/gcp/ml_engine/train/README.md
index 2e843f38f0a..4fbd91abf59 100644
--- a/components/gcp/ml_engine/train/README.md
+++ b/components/gcp/ml_engine/train/README.md
@@ -86,7 +86,7 @@ KFP_PACKAGE = 'https://storage.googleapis.com/ml-pipeline/release/0.1.14/kfp.tar
 import kfp.components as comp
 
 mlengine_train_op = comp.load_component_from_url(
-    'https://raw.githubusercontent.com/kubeflow/pipelines/a97f1d0ad0e7b92203f35c5b0b9af3a314952e05/components/gcp/ml_engine/train/component.yaml')
+    'https://raw.githubusercontent.com/kubeflow/pipelines/e7a021ed1da6b0ff21f7ba30422decbdcdda0c20/components/gcp/ml_engine/train/component.yaml')
 help(mlengine_train_op)
 ```
 
diff --git a/components/gcp/ml_engine/train/component.yaml b/components/gcp/ml_engine/train/component.yaml
index bcdca54ab5b..122315eede8 100644
--- a/components/gcp/ml_engine/train/component.yaml
+++ b/components/gcp/ml_engine/train/component.yaml
@@ -103,7 +103,7 @@ outputs:
     type: UI metadata
 implementation:
   container:
-    image: gcr.io/ml-pipeline/ml-pipeline-gcp:151c5349f13bea9d626c988563c04c0a86210c21
+    image: gcr.io/ml-pipeline/ml-pipeline-gcp:1449d08aeeeb47731d019ea046d90904d9c77953
     args: [
       kfp_component.google.ml_engine, train,
       --project_id, {inputValue: project_id},
diff --git a/components/gcp/ml_engine/train/sample.ipynb b/components/gcp/ml_engine/train/sample.ipynb
index c36b84602aa..99218823879 100644
--- a/components/gcp/ml_engine/train/sample.ipynb
+++ b/components/gcp/ml_engine/train/sample.ipynb
@@ -104,7 +104,7 @@
     "import kfp.components as comp\n",
     "\n",
     "mlengine_train_op = comp.load_component_from_url(\n",
-    "    'https://raw.githubusercontent.com/kubeflow/pipelines/a97f1d0ad0e7b92203f35c5b0b9af3a314952e05/components/gcp/ml_engine/train/component.yaml')\n",
+    "    'https://raw.githubusercontent.com/kubeflow/pipelines/e7a021ed1da6b0ff21f7ba30422decbdcdda0c20/components/gcp/ml_engine/train/component.yaml')\n",
     "help(mlengine_train_op)"
    ]
   },
diff --git a/components/kubeflow/deployer/component.yaml b/components/kubeflow/deployer/component.yaml
index 583615f702f..e39a7067c52 100644
--- a/components/kubeflow/deployer/component.yaml
+++ b/components/kubeflow/deployer/component.yaml
@@ -11,7 +11,7 @@ inputs:
 # - {name: Endppoint URI, type: Serving URI, description: 'URI of the deployed prediction service..'}
 implementation:
   container:
-    image: gcr.io/ml-pipeline/ml-pipeline-kubeflow-deployer:151c5349f13bea9d626c988563c04c0a86210c21
+    image: gcr.io/ml-pipeline/ml-pipeline-kubeflow-deployer:1449d08aeeeb47731d019ea046d90904d9c77953
     command: [/bin/deploy.sh]
     args: [
       --model-export-path, {inputValue: Model dir},
diff --git a/components/kubeflow/dnntrainer/component.yaml b/components/kubeflow/dnntrainer/component.yaml
index 53dbd3f38e9..6a5ff14faf7 100644
--- a/components/kubeflow/dnntrainer/component.yaml
+++ b/components/kubeflow/dnntrainer/component.yaml
@@ -16,7 +16,7 @@ outputs:
 - {name: MLPipeline UI metadata, type: UI metadata}
 implementation:
   container:
-    image: gcr.io/ml-pipeline/ml-pipeline-kubeflow-tf-trainer:151c5349f13bea9d626c988563c04c0a86210c21
+    image: gcr.io/ml-pipeline/ml-pipeline-kubeflow-tf-trainer:1449d08aeeeb47731d019ea046d90904d9c77953
     command: [python2, -m, trainer.task]
     args: [
      --transformed-data-dir, {inputValue: Transformed data dir},
diff --git a/components/kubeflow/launcher/kubeflow_tfjob_launcher_op.py b/components/kubeflow/launcher/kubeflow_tfjob_launcher_op.py
index 512fe769480..63ad5d4e58e 100644
--- a/components/kubeflow/launcher/kubeflow_tfjob_launcher_op.py
+++ b/components/kubeflow/launcher/kubeflow_tfjob_launcher_op.py
@@ -17,7 +17,7 @@
 def kubeflow_tfjob_launcher_op(container_image, command, number_of_workers: int, number_of_parameter_servers: int, tfjob_timeout_minutes: int, output_dir=None, step_name='TFJob-launcher'):
   return dsl.ContainerOp(
     name = step_name,
-    image = 'gcr.io/ml-pipeline/ml-pipeline-kubeflow-tf:151c5349f13bea9d626c988563c04c0a86210c21',
+    image = 'gcr.io/ml-pipeline/ml-pipeline-kubeflow-tf:1449d08aeeeb47731d019ea046d90904d9c77953',
     arguments = [
       '--workers', number_of_workers,
       '--pss', number_of_parameter_servers,
diff --git a/components/kubeflow/launcher/src/train.template.yaml b/components/kubeflow/launcher/src/train.template.yaml
index 4e8d660e72b..6fa1ffbc6ff 100644
--- a/components/kubeflow/launcher/src/train.template.yaml
+++ b/components/kubeflow/launcher/src/train.template.yaml
@@ -26,7 +26,7 @@ spec:
         spec:
           containers:
          - name: tensorflow
-            image: gcr.io/ml-pipeline/ml-pipeline-kubeflow-tf-trainer:151c5349f13bea9d626c988563c04c0a86210c21
+            image: gcr.io/ml-pipeline/ml-pipeline-kubeflow-tf-trainer:1449d08aeeeb47731d019ea046d90904d9c77953
             command:
             - python
             - -m
@@ -49,7 +49,7 @@ spec:
         spec:
          containers:
          - name: tensorflow
-            image: gcr.io/ml-pipeline/ml-pipeline-kubeflow-tf-trainer:151c5349f13bea9d626c988563c04c0a86210c21
+            image: gcr.io/ml-pipeline/ml-pipeline-kubeflow-tf-trainer:1449d08aeeeb47731d019ea046d90904d9c77953
             command:
             - python
             - -m
@@ -72,7 +72,7 @@ spec:
         spec:
          containers:
          - name: tensorflow
-            image: gcr.io/ml-pipeline/ml-pipeline-kubeflow-tf-trainer:151c5349f13bea9d626c988563c04c0a86210c21
+            image: gcr.io/ml-pipeline/ml-pipeline-kubeflow-tf-trainer:1449d08aeeeb47731d019ea046d90904d9c77953
             command:
             - python
            - -m
diff --git a/components/local/confusion_matrix/component.yaml b/components/local/confusion_matrix/component.yaml
index 2de25cf5a29..d8515115a0e 100644
--- a/components/local/confusion_matrix/component.yaml
+++ b/components/local/confusion_matrix/component.yaml
@@ -9,7 +9,7 @@ outputs:
 - {name: MLPipeline Metrics, type: Metrics}
 implementation:
   container:
-    image: gcr.io/ml-pipeline/ml-pipeline-local-confusion-matrix:151c5349f13bea9d626c988563c04c0a86210c21
+    image: gcr.io/ml-pipeline/ml-pipeline-local-confusion-matrix:1449d08aeeeb47731d019ea046d90904d9c77953
     command: [python2, /ml/confusion_matrix.py]
     args: [
       --predictions, {inputValue: Predictions},
diff --git a/components/local/roc/component.yaml b/components/local/roc/component.yaml
index e7bfacf505b..c2cda9f3015 100644
--- a/components/local/roc/component.yaml
+++ b/components/local/roc/component.yaml
@@ -11,7 +11,7 @@ outputs:
 - {name: MLPipeline Metrics, type: Metrics}
 implementation:
   container:
-    image: gcr.io/ml-pipeline/ml-pipeline-local-confusion-matrix:151c5349f13bea9d626c988563c04c0a86210c21
+    image: gcr.io/ml-pipeline/ml-pipeline-local-confusion-matrix:1449d08aeeeb47731d019ea046d90904d9c77953
     command: [python2, /ml/roc.py]
     args: [
       --predictions, {inputValue: Predictions dir},
diff --git a/samples/core/ai-platform/Chicago Crime Pipeline.ipynb b/samples/core/ai-platform/Chicago Crime Pipeline.ipynb
index c335ca48c01..f216b0ae421 100644
--- a/samples/core/ai-platform/Chicago Crime Pipeline.ipynb
+++ b/samples/core/ai-platform/Chicago Crime Pipeline.ipynb
@@ -112,7 +112,7 @@
    "outputs": [],
    "source": [
     "bigquery_query_op = comp.load_component_from_url(\n",
-    "    'https://raw.githubusercontent.com/kubeflow/pipelines/a97f1d0ad0e7b92203f35c5b0b9af3a314952e05/components/gcp/bigquery/query/component.yaml')\n",
+    "    'https://raw.githubusercontent.com/kubeflow/pipelines/e7a021ed1da6b0ff21f7ba30422decbdcdda0c20/components/gcp/bigquery/query/component.yaml')\n",
     "\n",
     "QUERY = \"\"\"\n",
     "    SELECT count(*) as count, TIMESTAMP_TRUNC(date, DAY) as day\n",
@@ -148,7 +148,7 @@
    "outputs": [],
    "source": [
     "mlengine_train_op = comp.load_component_from_url(\n",
-    "    'https://raw.githubusercontent.com/kubeflow/pipelines/a97f1d0ad0e7b92203f35c5b0b9af3a314952e05/components/gcp/ml_engine/train/component.yaml')\n",
+    "    'https://raw.githubusercontent.com/kubeflow/pipelines/e7a021ed1da6b0ff21f7ba30422decbdcdda0c20/components/gcp/ml_engine/train/component.yaml')\n",
     "\n",
     "def train(project_id,\n",
     "          trainer_args,\n",
@@ -186,7 +186,7 @@
    "outputs": [],
    "source": [
     "mlengine_deploy_op = comp.load_component_from_url(\n",
-    "    'https://raw.githubusercontent.com/kubeflow/pipelines/a97f1d0ad0e7b92203f35c5b0b9af3a314952e05/components/gcp/ml_engine/deploy/component.yaml')\n",
+    "    'https://raw.githubusercontent.com/kubeflow/pipelines/e7a021ed1da6b0ff21f7ba30422decbdcdda0c20/components/gcp/ml_engine/deploy/component.yaml')\n",
     "\n",
     "def deploy(\n",
     "    project_id,\n",
diff --git a/samples/core/kubeflow_tf_serving/kubeflow_tf_serving.ipynb b/samples/core/kubeflow_tf_serving/kubeflow_tf_serving.ipynb
index 25e98ce2233..c1f4454d17b 100644
--- a/samples/core/kubeflow_tf_serving/kubeflow_tf_serving.ipynb
+++ b/samples/core/kubeflow_tf_serving/kubeflow_tf_serving.ipynb
@@ -71,7 +71,7 @@
     "EXPERIMENT_NAME = 'serving_component'\n",
     "MODEL_VERSION = '1' # A number representing the version model \n",
     "OUTPUT_BUCKET = 'gs://%s-serving-component' % PROJECT_NAME # A GCS bucket for asset outputs\n",
-    "KUBEFLOW_DEPLOYER_IMAGE = 'gcr.io/ml-pipeline/ml-pipeline-kubeflow-deployer:151c5349f13bea9d626c988563c04c0a86210c21'\n",
+    "KUBEFLOW_DEPLOYER_IMAGE = 'gcr.io/ml-pipeline/ml-pipeline-kubeflow-deployer:1449d08aeeeb47731d019ea046d90904d9c77953'\n",
     "MODEL_PATH = '%s/%s' % (OUTPUT_BUCKET,MODEL_NAME) \n",
     "MODEL_VERSION_PATH = '%s/%s/%s' % (OUTPUT_BUCKET,MODEL_NAME,MODEL_VERSION)"
    ]
diff --git a/samples/core/tfx_cab_classification/tfx_cab_classification.py b/samples/core/tfx_cab_classification/tfx_cab_classification.py
index 3dd905f2245..ffa48c2dbb6 100755
--- a/samples/core/tfx_cab_classification/tfx_cab_classification.py
+++ b/samples/core/tfx_cab_classification/tfx_cab_classification.py
@@ -24,14 +24,14 @@
 dataflow_tf_data_validation_op = components.load_component_from_url('https://raw.githubusercontent.com/kubeflow/pipelines/a97f1d0ad0e7b92203f35c5b0b9af3a314952e05/components/dataflow/tfdv/component.yaml')
 dataflow_tf_transform_op = components.load_component_from_url('https://raw.githubusercontent.com/kubeflow/pipelines/a97f1d0ad0e7b92203f35c5b0b9af3a314952e05/components/dataflow/tft/component.yaml')
-tf_train_op = components.load_component_from_url('https://raw.githubusercontent.com/kubeflow/pipelines/b705631e892bd8181cabcd704e6e6385b16daf90/components/kubeflow/dnntrainer/component.yaml')
+tf_train_op = components.load_component_from_url('https://raw.githubusercontent.com/kubeflow/pipelines/e7a021ed1da6b0ff21f7ba30422decbdcdda0c20/components/kubeflow/dnntrainer/component.yaml')
 dataflow_tf_model_analyze_op = components.load_component_from_url('https://raw.githubusercontent.com/kubeflow/pipelines/b705631e892bd8181cabcd704e6e6385b16daf90/components/dataflow/tfma/component.yaml')
 dataflow_tf_predict_op = components.load_component_from_url('https://raw.githubusercontent.com/kubeflow/pipelines/b705631e892bd8181cabcd704e6e6385b16daf90/components/dataflow/predict/component.yaml')
 
-confusion_matrix_op = components.load_component_from_url('https://raw.githubusercontent.com/kubeflow/pipelines/b705631e892bd8181cabcd704e6e6385b16daf90/components/local/confusion_matrix/component.yaml')
-roc_op = components.load_component_from_url('https://raw.githubusercontent.com/kubeflow/pipelines/b705631e892bd8181cabcd704e6e6385b16daf90/components/local/roc/component.yaml')
+confusion_matrix_op = components.load_component_from_url('https://raw.githubusercontent.com/kubeflow/pipelines/e7a021ed1da6b0ff21f7ba30422decbdcdda0c20/components/local/confusion_matrix/component.yaml')
+roc_op = components.load_component_from_url('https://raw.githubusercontent.com/kubeflow/pipelines/e7a021ed1da6b0ff21f7ba30422decbdcdda0c20/components/local/roc/component.yaml')
 
-kubeflow_deploy_op = components.load_component_from_url('https://raw.githubusercontent.com/kubeflow/pipelines/a97f1d0ad0e7b92203f35c5b0b9af3a314952e05/components/kubeflow/deployer/component.yaml')
+kubeflow_deploy_op = components.load_component_from_url('https://raw.githubusercontent.com/kubeflow/pipelines/e7a021ed1da6b0ff21f7ba30422decbdcdda0c20/components/kubeflow/deployer/component.yaml')
 
 
 @dsl.pipeline(
     name='TFX Taxi Cab Classification Pipeline Example',
diff --git a/samples/core/xgboost_training_cm/xgboost_training_cm.py b/samples/core/xgboost_training_cm/xgboost_training_cm.py
index fd4796db987..ec87c88f7c6 100755
--- a/samples/core/xgboost_training_cm/xgboost_training_cm.py
+++ b/samples/core/xgboost_training_cm/xgboost_training_cm.py
@@ -20,8 +20,8 @@
 from kfp import dsl
 from kfp import gcp
 
-confusion_matrix_op = components.load_component_from_url('https://raw.githubusercontent.com/kubeflow/pipelines/b705631e892bd8181cabcd704e6e6385b16daf90/components/local/confusion_matrix/component.yaml')
-roc_op = components.load_component_from_url('https://raw.githubusercontent.com/kubeflow/pipelines/b705631e892bd8181cabcd704e6e6385b16daf90/components/local/roc/component.yaml')
+confusion_matrix_op = components.load_component_from_url('https://raw.githubusercontent.com/kubeflow/pipelines/e7a021ed1da6b0ff21f7ba30422decbdcdda0c20/components/local/confusion_matrix/component.yaml')
+roc_op = components.load_component_from_url('https://raw.githubusercontent.com/kubeflow/pipelines/e7a021ed1da6b0ff21f7ba30422decbdcdda0c20/components/local/roc/component.yaml')
 
 # ! Please do not forget to enable the Dataproc API in your cluster https://console.developers.google.com/apis/api/dataproc.googleapis.com/overview
 
@@ -36,7 +36,7 @@ def dataproc_create_cluster_op(
 ):
     return dsl.ContainerOp(
         name='Dataproc - Create cluster',
-        image='gcr.io/ml-pipeline/ml-pipeline-dataproc-create-cluster:151c5349f13bea9d626c988563c04c0a86210c21',
+        image='gcr.io/ml-pipeline/ml-pipeline-dataproc-create-cluster:1449d08aeeeb47731d019ea046d90904d9c77953',
         arguments=[
             '--project', project,
             '--region', region,
@@ -56,7 +56,7 @@ def dataproc_delete_cluster_op(
 ):
     return dsl.ContainerOp(
         name='Dataproc - Delete cluster',
-        image='gcr.io/ml-pipeline/ml-pipeline-dataproc-delete-cluster:151c5349f13bea9d626c988563c04c0a86210c21',
+        image='gcr.io/ml-pipeline/ml-pipeline-dataproc-delete-cluster:1449d08aeeeb47731d019ea046d90904d9c77953',
         arguments=[
             '--project', project,
             '--region', region,
@@ -76,7 +76,7 @@ def dataproc_analyze_op(
 ):
     return dsl.ContainerOp(
         name='Dataproc - Analyze',
-        image='gcr.io/ml-pipeline/ml-pipeline-dataproc-analyze:151c5349f13bea9d626c988563c04c0a86210c21',
+        image='gcr.io/ml-pipeline/ml-pipeline-dataproc-analyze:1449d08aeeeb47731d019ea046d90904d9c77953',
         arguments=[
             '--project', project,
             '--region', region,
@@ -103,7 +103,7 @@ def dataproc_transform_op(
 ):
     return dsl.ContainerOp(
         name='Dataproc - Transform',
-        image='gcr.io/ml-pipeline/ml-pipeline-dataproc-transform:151c5349f13bea9d626c988563c04c0a86210c21',
+        image='gcr.io/ml-pipeline/ml-pipeline-dataproc-transform:1449d08aeeeb47731d019ea046d90904d9c77953',
         arguments=[
             '--project', project,
             '--region', region,
@@ -141,7 +141,7 @@ def dataproc_train_op(
 
     return dsl.ContainerOp(
         name='Dataproc - Train XGBoost model',
-        image='gcr.io/ml-pipeline/ml-pipeline-dataproc-train:151c5349f13bea9d626c988563c04c0a86210c21',
+        image='gcr.io/ml-pipeline/ml-pipeline-dataproc-train:1449d08aeeeb47731d019ea046d90904d9c77953',
         arguments=[
             '--project', project,
             '--region', region,
@@ -174,7 +174,7 @@ def dataproc_predict_op(
 ):
     return dsl.ContainerOp(
         name='Dataproc - Predict with XGBoost model',
-        image='gcr.io/ml-pipeline/ml-pipeline-dataproc-predict:151c5349f13bea9d626c988563c04c0a86210c21',
+        image='gcr.io/ml-pipeline/ml-pipeline-dataproc-predict:1449d08aeeeb47731d019ea046d90904d9c77953',
         arguments=[
             '--project', project,
             '--region', region,