diff --git a/components/dataflow/predict/component.yaml b/components/dataflow/predict/component.yaml
index 50bd9770738..5e05ab570a2 100644
--- a/components/dataflow/predict/component.yaml
+++ b/components/dataflow/predict/component.yaml
@@ -15,7 +15,7 @@ outputs:
   - {name: Predictions dir, type: GCSPath, description: 'GCS or local directory.'} #Will contain prediction_results-* and schema.json files; TODO: Split outputs and replace dir with single file
 #   type: {GCSPath: {path_type: Directory}}
 implementation:
   container:
-    image: gcr.io/ml-pipeline/ml-pipeline-dataflow-tf-predict:e20fad3e161e88226c83437271adb063221459b9
+    image: gcr.io/ml-pipeline/ml-pipeline-dataflow-tf-predict:b0147bdbed9f25212408e0468a475289e80e0406
     command: [python2, /ml/predict.py]
     args: [
       --data, {inputValue: Data file pattern},
diff --git a/components/dataflow/tfdv/component.yaml b/components/dataflow/tfdv/component.yaml
index a07fb31287c..bc186561210 100644
--- a/components/dataflow/tfdv/component.yaml
+++ b/components/dataflow/tfdv/component.yaml
@@ -18,7 +18,7 @@ outputs:
   - {name: Validation result, type: String, description: Indicates whether anomalies were detected or not.}
 implementation:
   container:
-    image: gcr.io/ml-pipeline/ml-pipeline-dataflow-tfdv:e20fad3e161e88226c83437271adb063221459b9
+    image: gcr.io/ml-pipeline/ml-pipeline-dataflow-tfdv:b0147bdbed9f25212408e0468a475289e80e0406
     command: [python2, /ml/validate.py]
     args: [
       --csv-data-for-inference, {inputValue: Inference data},
diff --git a/components/dataflow/tfma/component.yaml b/components/dataflow/tfma/component.yaml
index 1eead992608..d3900f81a93 100644
--- a/components/dataflow/tfma/component.yaml
+++ b/components/dataflow/tfma/component.yaml
@@ -17,7 +17,7 @@ outputs:
   - {name: Analysis results dir, type: GCSPath, description: GCS or local directory where the analysis results should were written.} # type: {GCSPath: {path_type: Directory}}
 implementation:
   container:
-    image: gcr.io/ml-pipeline/ml-pipeline-dataflow-tfma:e20fad3e161e88226c83437271adb063221459b9
+    image: gcr.io/ml-pipeline/ml-pipeline-dataflow-tfma:b0147bdbed9f25212408e0468a475289e80e0406
     command: [python2, /ml/model_analysis.py]
     args: [
       --model, {inputValue: Model},
diff --git a/components/dataflow/tft/component.yaml b/components/dataflow/tft/component.yaml
index de356d4506d..d96b1f17052 100644
--- a/components/dataflow/tft/component.yaml
+++ b/components/dataflow/tft/component.yaml
@@ -12,7 +12,7 @@ outputs:
   - {name: Transformed data dir, type: GCSPath} # type: {GCSPath: {path_type: Directory}}
 implementation:
   container:
-    image: gcr.io/ml-pipeline/ml-pipeline-dataflow-tft:e20fad3e161e88226c83437271adb063221459b9
+    image: gcr.io/ml-pipeline/ml-pipeline-dataflow-tft:b0147bdbed9f25212408e0468a475289e80e0406
     command: [python2, /ml/transform.py]
     args: [
       --train, {inputValue: Training data file pattern},
diff --git a/components/gcp/bigquery/query/component.yaml b/components/gcp/bigquery/query/component.yaml
index 3edc4298d17..118f20976fa 100644
--- a/components/gcp/bigquery/query/component.yaml
+++ b/components/gcp/bigquery/query/component.yaml
@@ -54,7 +54,7 @@ outputs:
     type: GCSPath
 implementation:
   container:
-    image: gcr.io/ml-pipeline/ml-pipeline-gcp:e20fad3e161e88226c83437271adb063221459b9
+    image: gcr.io/ml-pipeline/ml-pipeline-gcp:b0147bdbed9f25212408e0468a475289e80e0406
     args: [
       kfp_component.google.bigquery, query,
       --query, {inputValue: query},
diff --git a/components/gcp/dataflow/launch_python/component.yaml b/components/gcp/dataflow/launch_python/component.yaml
index ace78957cbc..7fcadb2bb4a 100644
--- a/components/gcp/dataflow/launch_python/component.yaml
+++ b/components/gcp/dataflow/launch_python/component.yaml
@@ -48,7 +48,7 @@ outputs:
     type: String
 implementation:
   container:
-    image: gcr.io/ml-pipeline/ml-pipeline-gcp:e20fad3e161e88226c83437271adb063221459b9
+    image: gcr.io/ml-pipeline/ml-pipeline-gcp:b0147bdbed9f25212408e0468a475289e80e0406
     args: [
       kfp_component.google.dataflow, launch_python,
       --python_file_path, {inputValue: python_file_path},
diff --git a/components/gcp/dataflow/launch_template/component.yaml b/components/gcp/dataflow/launch_template/component.yaml
index 72398a6d2eb..4b348a67de5 100644
--- a/components/gcp/dataflow/launch_template/component.yaml
+++ b/components/gcp/dataflow/launch_template/component.yaml
@@ -58,7 +58,7 @@ outputs:
     type: String
 implementation:
   container:
-    image: gcr.io/ml-pipeline/ml-pipeline-gcp:e20fad3e161e88226c83437271adb063221459b9
+    image: gcr.io/ml-pipeline/ml-pipeline-gcp:b0147bdbed9f25212408e0468a475289e80e0406
     args: [
       kfp_component.google.dataflow, launch_template,
       --project_id, {inputValue: project_id},
diff --git a/components/gcp/dataproc/create_cluster/component.yaml b/components/gcp/dataproc/create_cluster/component.yaml
index 51611880956..d2659f9163d 100644
--- a/components/gcp/dataproc/create_cluster/component.yaml
+++ b/components/gcp/dataproc/create_cluster/component.yaml
@@ -65,7 +65,7 @@ outputs:
     type: String
 implementation:
   container:
-    image: gcr.io/ml-pipeline/ml-pipeline-gcp:e20fad3e161e88226c83437271adb063221459b9
+    image: gcr.io/ml-pipeline/ml-pipeline-gcp:b0147bdbed9f25212408e0468a475289e80e0406
     args: [
       kfp_component.google.dataproc, create_cluster,
       --project_id, {inputValue: project_id},
diff --git a/components/gcp/dataproc/delete_cluster/component.yaml b/components/gcp/dataproc/delete_cluster/component.yaml
index 30cb279949f..9cbef1fbf3e 100644
--- a/components/gcp/dataproc/delete_cluster/component.yaml
+++ b/components/gcp/dataproc/delete_cluster/component.yaml
@@ -33,7 +33,7 @@ inputs:
     type: Integer
 implementation:
   container:
-    image: gcr.io/ml-pipeline/ml-pipeline-gcp:e20fad3e161e88226c83437271adb063221459b9
+    image: gcr.io/ml-pipeline/ml-pipeline-gcp:b0147bdbed9f25212408e0468a475289e80e0406
     args: [
       kfp_component.google.dataproc, delete_cluster,
       --project_id, {inputValue: project_id},
diff --git a/components/gcp/dataproc/submit_hadoop_job/component.yaml b/components/gcp/dataproc/submit_hadoop_job/component.yaml
index 66533e97fbd..781ab883300 100644
--- a/components/gcp/dataproc/submit_hadoop_job/component.yaml
+++ b/components/gcp/dataproc/submit_hadoop_job/component.yaml
@@ -75,7 +75,7 @@ outputs:
     type: String
 implementation:
   container:
-    image: gcr.io/ml-pipeline/ml-pipeline-gcp:e20fad3e161e88226c83437271adb063221459b9
+    image: gcr.io/ml-pipeline/ml-pipeline-gcp:b0147bdbed9f25212408e0468a475289e80e0406
     args: [
       kfp_component.google.dataproc, submit_hadoop_job,
       --project_id, {inputValue: project_id},
diff --git a/components/gcp/dataproc/submit_hive_job/component.yaml b/components/gcp/dataproc/submit_hive_job/component.yaml
index 4d1a2505849..350467ef7cb 100644
--- a/components/gcp/dataproc/submit_hive_job/component.yaml
+++ b/components/gcp/dataproc/submit_hive_job/component.yaml
@@ -70,7 +70,7 @@ outputs:
     type: String
 implementation:
   container:
-    image: gcr.io/ml-pipeline/ml-pipeline-gcp:e20fad3e161e88226c83437271adb063221459b9
+    image: gcr.io/ml-pipeline/ml-pipeline-gcp:b0147bdbed9f25212408e0468a475289e80e0406
     args: [
       kfp_component.google.dataproc, submit_hive_job,
       --project_id, {inputValue: project_id},
diff --git a/components/gcp/dataproc/submit_pig_job/component.yaml b/components/gcp/dataproc/submit_pig_job/component.yaml
index 18ae945d145..711c8079cb1 100644
--- a/components/gcp/dataproc/submit_pig_job/component.yaml
+++ b/components/gcp/dataproc/submit_pig_job/component.yaml
@@ -70,7 +70,7 @@ outputs:
     type: String
 implementation:
   container:
-    image: gcr.io/ml-pipeline/ml-pipeline-gcp:e20fad3e161e88226c83437271adb063221459b9
+    image: gcr.io/ml-pipeline/ml-pipeline-gcp:b0147bdbed9f25212408e0468a475289e80e0406
     args: [
       kfp_component.google.dataproc, submit_pig_job,
       --project_id, {inputValue: project_id},
diff --git a/components/gcp/dataproc/submit_pyspark_job/component.yaml b/components/gcp/dataproc/submit_pyspark_job/component.yaml
index 476540f1d06..94a91a63660 100644
--- a/components/gcp/dataproc/submit_pyspark_job/component.yaml
+++ b/components/gcp/dataproc/submit_pyspark_job/component.yaml
@@ -64,7 +64,7 @@ outputs:
     type: String
 implementation:
   container:
-    image: gcr.io/ml-pipeline/ml-pipeline-gcp:e20fad3e161e88226c83437271adb063221459b9
+    image: gcr.io/ml-pipeline/ml-pipeline-gcp:b0147bdbed9f25212408e0468a475289e80e0406
     args: [
       kfp_component.google.dataproc, submit_pyspark_job,
       --project_id, {inputValue: project_id},
diff --git a/components/gcp/dataproc/submit_spark_job/component.yaml b/components/gcp/dataproc/submit_spark_job/component.yaml
index e833f3444a9..cea2568b4e2 100644
--- a/components/gcp/dataproc/submit_spark_job/component.yaml
+++ b/components/gcp/dataproc/submit_spark_job/component.yaml
@@ -71,7 +71,7 @@ outputs:
     type: String
 implementation:
   container:
-    image: gcr.io/ml-pipeline/ml-pipeline-gcp:e20fad3e161e88226c83437271adb063221459b9
+    image: gcr.io/ml-pipeline/ml-pipeline-gcp:b0147bdbed9f25212408e0468a475289e80e0406
     args: [
       kfp_component.google.dataproc, submit_spark_job,
       --project_id, {inputValue: project_id},
diff --git a/components/gcp/dataproc/submit_sparksql_job/component.yaml b/components/gcp/dataproc/submit_sparksql_job/component.yaml
index d5a0d13bf66..b6f6ac2b275 100644
--- a/components/gcp/dataproc/submit_sparksql_job/component.yaml
+++ b/components/gcp/dataproc/submit_sparksql_job/component.yaml
@@ -70,7 +70,7 @@ outputs:
     type: String
 implementation:
   container:
-    image: gcr.io/ml-pipeline/ml-pipeline-gcp:e20fad3e161e88226c83437271adb063221459b9
+    image: gcr.io/ml-pipeline/ml-pipeline-gcp:b0147bdbed9f25212408e0468a475289e80e0406
     args: [
       kfp_component.google.dataproc, submit_sparksql_job,
       --project_id, {inputValue: project_id},
diff --git a/components/gcp/ml_engine/batch_predict/component.yaml b/components/gcp/ml_engine/batch_predict/component.yaml
index d08290b2f53..0677cf50a81 100644
--- a/components/gcp/ml_engine/batch_predict/component.yaml
+++ b/components/gcp/ml_engine/batch_predict/component.yaml
@@ -64,7 +64,7 @@ outputs:
     type: String
 implementation:
   container:
-    image: gcr.io/ml-pipeline/ml-pipeline-gcp:e20fad3e161e88226c83437271adb063221459b9
+    image: gcr.io/ml-pipeline/ml-pipeline-gcp:b0147bdbed9f25212408e0468a475289e80e0406
     args: [
       kfp_component.google.ml_engine, batch_predict,
       --project_id, {inputValue: project_id},
diff --git a/components/gcp/ml_engine/deploy/component.yaml b/components/gcp/ml_engine/deploy/component.yaml
index 51b2208e85a..1b92e1fc7e1 100644
--- a/components/gcp/ml_engine/deploy/component.yaml
+++ b/components/gcp/ml_engine/deploy/component.yaml
@@ -90,7 +90,7 @@ outputs:
     type: String
 implementation:
   container:
-    image: gcr.io/ml-pipeline/ml-pipeline-gcp:e20fad3e161e88226c83437271adb063221459b9
+    image: gcr.io/ml-pipeline/ml-pipeline-gcp:b0147bdbed9f25212408e0468a475289e80e0406
     args: [
       kfp_component.google.ml_engine, deploy,
       --model_uri, {inputValue: model_uri},
diff --git a/components/gcp/ml_engine/train/component.yaml b/components/gcp/ml_engine/train/component.yaml
index de54d9802cc..8803b545b62 100644
--- a/components/gcp/ml_engine/train/component.yaml
+++ b/components/gcp/ml_engine/train/component.yaml
@@ -98,7 +98,7 @@ outputs:
     type: GCSPath
 implementation:
   container:
-    image: gcr.io/ml-pipeline/ml-pipeline-gcp:e20fad3e161e88226c83437271adb063221459b9
+    image: gcr.io/ml-pipeline/ml-pipeline-gcp:b0147bdbed9f25212408e0468a475289e80e0406
     args: [
       kfp_component.google.ml_engine, train,
       --project_id, {inputValue: project_id},
diff --git a/components/kubeflow/deployer/component.yaml b/components/kubeflow/deployer/component.yaml
index 2b24ac2f21f..0518499d304 100644
--- a/components/kubeflow/deployer/component.yaml
+++ b/components/kubeflow/deployer/component.yaml
@@ -11,7 +11,7 @@ inputs:
 # - {name: Endppoint URI, type: Serving URI, description: 'URI of the deployed prediction service..'}
 implementation:
   container:
-    image: gcr.io/ml-pipeline/ml-pipeline-kubeflow-deployer:e20fad3e161e88226c83437271adb063221459b9
+    image: gcr.io/ml-pipeline/ml-pipeline-kubeflow-deployer:b0147bdbed9f25212408e0468a475289e80e0406
     command: [/bin/deploy.sh]
     args: [
       --model-export-path, {inputValue: Model dir},
diff --git a/components/kubeflow/dnntrainer/component.yaml b/components/kubeflow/dnntrainer/component.yaml
index ba84c326f85..eafa3fea6c6 100644
--- a/components/kubeflow/dnntrainer/component.yaml
+++ b/components/kubeflow/dnntrainer/component.yaml
@@ -15,7 +15,7 @@ outputs:
   - {name: Training output dir, type: GCSPath, description: 'GCS or local directory.'} # type: {GCSPath: {path_type: Directory}}
 implementation:
   container:
-    image: gcr.io/ml-pipeline/ml-pipeline-kubeflow-tf-trainer:e20fad3e161e88226c83437271adb063221459b9
+    image: gcr.io/ml-pipeline/ml-pipeline-kubeflow-tf-trainer:b0147bdbed9f25212408e0468a475289e80e0406
     command: [python2, -m, trainer.task]
     args: [
       --transformed-data-dir, {inputValue: Transformed data dir},
diff --git a/components/kubeflow/launcher/kubeflow_tfjob_launcher_op.py b/components/kubeflow/launcher/kubeflow_tfjob_launcher_op.py
index 526b2b5348d..f992a93d52c 100644
--- a/components/kubeflow/launcher/kubeflow_tfjob_launcher_op.py
+++ b/components/kubeflow/launcher/kubeflow_tfjob_launcher_op.py
@@ -17,7 +17,7 @@ def kubeflow_tfjob_launcher_op(container_image, command, number_of_workers: int, number_of_parameter_servers: int, tfjob_timeout_minutes: int, output_dir=None, step_name='TFJob-launcher'):
   return dsl.ContainerOp(
     name = step_name,
-    image = 'gcr.io/ml-pipeline/ml-pipeline-kubeflow-tf:e20fad3e161e88226c83437271adb063221459b9',
+    image = 'gcr.io/ml-pipeline/ml-pipeline-kubeflow-tf:b0147bdbed9f25212408e0468a475289e80e0406',
     arguments = [
       '--workers', number_of_workers,
       '--pss', number_of_parameter_servers,
diff --git a/components/kubeflow/launcher/src/train.template.yaml b/components/kubeflow/launcher/src/train.template.yaml
index 4a03e2c3c2c..f41e2bf13a3 100644
--- a/components/kubeflow/launcher/src/train.template.yaml
+++ b/components/kubeflow/launcher/src/train.template.yaml
@@ -26,7 +26,7 @@ spec:
     spec:
       containers:
       - name: tensorflow
-        image: gcr.io/ml-pipeline/ml-pipeline-kubeflow-tf-trainer:e20fad3e161e88226c83437271adb063221459b9
+        image: gcr.io/ml-pipeline/ml-pipeline-kubeflow-tf-trainer:b0147bdbed9f25212408e0468a475289e80e0406
         command:
         - python
        - -m
@@ -49,7 +49,7 @@ spec:
     spec:
       containers:
       - name: tensorflow
-        image: gcr.io/ml-pipeline/ml-pipeline-kubeflow-tf-trainer:e20fad3e161e88226c83437271adb063221459b9
+        image: gcr.io/ml-pipeline/ml-pipeline-kubeflow-tf-trainer:b0147bdbed9f25212408e0468a475289e80e0406
         command:
         - python
        - -m
@@ -72,7 +72,7 @@ spec:
     spec:
      containers:
       - name: tensorflow
-        image: gcr.io/ml-pipeline/ml-pipeline-kubeflow-tf-trainer:e20fad3e161e88226c83437271adb063221459b9
+        image: gcr.io/ml-pipeline/ml-pipeline-kubeflow-tf-trainer:b0147bdbed9f25212408e0468a475289e80e0406
         command:
         - python
        - -m
diff --git a/components/local/confusion_matrix/component.yaml b/components/local/confusion_matrix/component.yaml
index 324d977d33a..96703dfb984 100644
--- a/components/local/confusion_matrix/component.yaml
+++ b/components/local/confusion_matrix/component.yaml
@@ -9,7 +9,7 @@ inputs:
 # - {name: Metrics, type: Metrics}
 implementation:
   container:
-    image: gcr.io/ml-pipeline/ml-pipeline-local-confusion-matrix:e20fad3e161e88226c83437271adb063221459b9
+    image: gcr.io/ml-pipeline/ml-pipeline-local-confusion-matrix:b0147bdbed9f25212408e0468a475289e80e0406
     command: [python2, /ml/confusion_matrix.py]
     args: [
       --predictions, {inputValue: Predictions},
diff --git a/components/local/roc/component.yaml b/components/local/roc/component.yaml
index dc889d36ead..24297d79424 100644
--- a/components/local/roc/component.yaml
+++ b/components/local/roc/component.yaml
@@ -11,7 +11,7 @@ inputs:
 # - {name: Metrics, type: Metrics}
 implementation:
   container:
-    image: gcr.io/ml-pipeline/ml-pipeline-local-confusion-matrix:e20fad3e161e88226c83437271adb063221459b9
+    image: gcr.io/ml-pipeline/ml-pipeline-local-confusion-matrix:b0147bdbed9f25212408e0468a475289e80e0406
     command: [python2, /ml/roc.py]
     args: [
       --predictions, {inputValue: Predictions dir},
diff --git a/samples/kubeflow-tf/kubeflow-training-classification.py b/samples/kubeflow-tf/kubeflow-training-classification.py
index dea5957bd8e..d95e9ba8761 100755
--- a/samples/kubeflow-tf/kubeflow-training-classification.py
+++ b/samples/kubeflow-tf/kubeflow-training-classification.py
@@ -68,7 +68,7 @@ def kubeflow_training(output, project,
     ).apply(gcp.use_gcp_secret('user-gcp-sa'))

     if use_gpu:
-        training.image = 'gcr.io/ml-pipeline/ml-pipeline-kubeflow-tf-trainer-gpu:e20fad3e161e88226c83437271adb063221459b9',
+        training.image = 'gcr.io/ml-pipeline/ml-pipeline-kubeflow-tf-trainer-gpu:b0147bdbed9f25212408e0468a475289e80e0406',
         training.set_gpu_limit(1)

     prediction = dataflow_tf_predict_op(
diff --git a/samples/notebooks/KubeFlow Pipeline Using TFX OSS Components.ipynb b/samples/notebooks/KubeFlow Pipeline Using TFX OSS Components.ipynb
index f3434ee577f..96fb33c3f47 100644
--- a/samples/notebooks/KubeFlow Pipeline Using TFX OSS Components.ipynb
+++ b/samples/notebooks/KubeFlow Pipeline Using TFX OSS Components.ipynb
@@ -44,13 +44,13 @@
     "EVAL_DATA = 'gs://ml-pipeline-playground/tfx/taxi-cab-classification/eval.csv'\n",
     "HIDDEN_LAYER_SIZE = '1500'\n",
     "STEPS = 3000\n",
-    "DATAFLOW_TFDV_IMAGE = 'gcr.io/ml-pipeline/ml-pipeline-dataflow-tfdv:e20fad3e161e88226c83437271adb063221459b9'\n",
-    "DATAFLOW_TFT_IMAGE = 'gcr.io/ml-pipeline/ml-pipeline-dataflow-tft:e20fad3e161e88226c83437271adb063221459b9'\n",
-    "DATAFLOW_TFMA_IMAGE = 'gcr.io/ml-pipeline/ml-pipeline-dataflow-tfma:e20fad3e161e88226c83437271adb063221459b9'\n",
-    "DATAFLOW_TF_PREDICT_IMAGE = 'gcr.io/ml-pipeline/ml-pipeline-dataflow-tf-predict:e20fad3e161e88226c83437271adb063221459b9'\n",
-    "KUBEFLOW_TF_TRAINER_IMAGE = 'gcr.io/ml-pipeline/ml-pipeline-kubeflow-tf-trainer:e20fad3e161e88226c83437271adb063221459b9'\n",
-    "KUBEFLOW_TF_TRAINER_GPU_IMAGE = 'gcr.io/ml-pipeline/ml-pipeline-kubeflow-tf-trainer-gpu:e20fad3e161e88226c83437271adb063221459b9'\n",
-    "KUBEFLOW_DEPLOYER_IMAGE = 'gcr.io/ml-pipeline/ml-pipeline-kubeflow-deployer:e20fad3e161e88226c83437271adb063221459b9'\n",
+    "DATAFLOW_TFDV_IMAGE = 'gcr.io/ml-pipeline/ml-pipeline-dataflow-tfdv:b0147bdbed9f25212408e0468a475289e80e0406'\n",
+    "DATAFLOW_TFT_IMAGE = 'gcr.io/ml-pipeline/ml-pipeline-dataflow-tft:b0147bdbed9f25212408e0468a475289e80e0406'\n",
+    "DATAFLOW_TFMA_IMAGE = 'gcr.io/ml-pipeline/ml-pipeline-dataflow-tfma:b0147bdbed9f25212408e0468a475289e80e0406'\n",
+    "DATAFLOW_TF_PREDICT_IMAGE = 'gcr.io/ml-pipeline/ml-pipeline-dataflow-tf-predict:b0147bdbed9f25212408e0468a475289e80e0406'\n",
+    "KUBEFLOW_TF_TRAINER_IMAGE = 'gcr.io/ml-pipeline/ml-pipeline-kubeflow-tf-trainer:b0147bdbed9f25212408e0468a475289e80e0406'\n",
+    "KUBEFLOW_TF_TRAINER_GPU_IMAGE = 'gcr.io/ml-pipeline/ml-pipeline-kubeflow-tf-trainer-gpu:b0147bdbed9f25212408e0468a475289e80e0406'\n",
+    "KUBEFLOW_DEPLOYER_IMAGE = 'gcr.io/ml-pipeline/ml-pipeline-kubeflow-deployer:b0147bdbed9f25212408e0468a475289e80e0406'\n",
     "DEPLOYER_MODEL = 'notebook_tfx_taxi'\n",
     "DEPLOYER_VERSION_DEV = 'dev'\n",
     "DEPLOYER_VERSION_PROD = 'prod'\n",
diff --git a/samples/xgboost-spark/xgboost-training-cm.py b/samples/xgboost-spark/xgboost-training-cm.py
index 636a4c62531..a582249856f 100755
--- a/samples/xgboost-spark/xgboost-training-cm.py
+++ b/samples/xgboost-spark/xgboost-training-cm.py
@@ -36,7 +36,7 @@ def dataproc_create_cluster_op(
 ):
   return dsl.ContainerOp(
       name='Dataproc - Create cluster',
-      image='gcr.io/ml-pipeline/ml-pipeline-dataproc-create-cluster:e20fad3e161e88226c83437271adb063221459b9',
+      image='gcr.io/ml-pipeline/ml-pipeline-dataproc-create-cluster:b0147bdbed9f25212408e0468a475289e80e0406',
       arguments=[
           '--project', project,
           '--region', region,
@@ -56,7 +56,7 @@ def dataproc_delete_cluster_op(
 ):
   return dsl.ContainerOp(
       name='Dataproc - Delete cluster',
-      image='gcr.io/ml-pipeline/ml-pipeline-dataproc-delete-cluster:e20fad3e161e88226c83437271adb063221459b9',
+      image='gcr.io/ml-pipeline/ml-pipeline-dataproc-delete-cluster:b0147bdbed9f25212408e0468a475289e80e0406',
       arguments=[
           '--project', project,
           '--region', region,
@@ -76,7 +76,7 @@ def dataproc_analyze_op(
 ):
   return dsl.ContainerOp(
       name='Dataproc - Analyze',
-      image='gcr.io/ml-pipeline/ml-pipeline-dataproc-analyze:e20fad3e161e88226c83437271adb063221459b9',
+      image='gcr.io/ml-pipeline/ml-pipeline-dataproc-analyze:b0147bdbed9f25212408e0468a475289e80e0406',
       arguments=[
           '--project', project,
           '--region', region,
@@ -103,7 +103,7 @@ def dataproc_transform_op(
 ):
   return dsl.ContainerOp(
       name='Dataproc - Transform',
-      image='gcr.io/ml-pipeline/ml-pipeline-dataproc-transform:e20fad3e161e88226c83437271adb063221459b9',
+      image='gcr.io/ml-pipeline/ml-pipeline-dataproc-transform:b0147bdbed9f25212408e0468a475289e80e0406',
       arguments=[
           '--project', project,
           '--region', region,
@@ -141,7 +141,7 @@ def dataproc_train_op(

   return dsl.ContainerOp(
       name='Dataproc - Train XGBoost model',
-      image='gcr.io/ml-pipeline/ml-pipeline-dataproc-train:e20fad3e161e88226c83437271adb063221459b9',
+      image='gcr.io/ml-pipeline/ml-pipeline-dataproc-train:b0147bdbed9f25212408e0468a475289e80e0406',
       arguments=[
           '--project', project,
           '--region', region,
@@ -174,7 +174,7 @@ def dataproc_predict_op(
 ):
   return dsl.ContainerOp(
       name='Dataproc - Predict with XGBoost model',
-      image='gcr.io/ml-pipeline/ml-pipeline-dataproc-predict:e20fad3e161e88226c83437271adb063221459b9',
+      image='gcr.io/ml-pipeline/ml-pipeline-dataproc-predict:b0147bdbed9f25212408e0468a475289e80e0406',
       arguments=[
           '--project', project,
           '--region', region,
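Every hunk above makes the same mechanical change: the pinned gcr.io/ml-pipeline image tag e20fad3e161e88226c83437271adb063221459b9 is replaced with b0147bdbed9f25212408e0468a475289e80e0406 in the component specs, launcher template, samples, and notebook. As a rough, hypothetical sketch only (this script, its name, and its file globs are not part of this change and are assumptions for illustration), a bump of this kind could be applied in one pass with something like:

#!/usr/bin/env python3
# Hypothetical helper, NOT part of this change: rewrite every pinned
# gcr.io/ml-pipeline image tag from OLD_TAG to NEW_TAG across the repo.
import pathlib
import re

OLD_TAG = "e20fad3e161e88226c83437271adb063221459b9"
NEW_TAG = "b0147bdbed9f25212408e0468a475289e80e0406"

# Matches any gcr.io/ml-pipeline/<image-name>:<OLD_TAG> reference.
PATTERN = re.compile(r"(gcr\.io/ml-pipeline/[A-Za-z0-9._-]+):" + re.escape(OLD_TAG))

# The kinds of files this diff touches: component specs, launcher templates,
# sample pipelines, and notebooks (assumed globs, run from the repo root).
GLOBS = [
    "components/**/*.yaml",
    "components/**/*.py",
    "samples/**/*.py",
    "samples/**/*.ipynb",
]

for glob_pattern in GLOBS:
    for path in pathlib.Path(".").glob(glob_pattern):
        text = path.read_text()
        updated = PATTERN.sub(r"\1:" + NEW_TAG, text)
        if updated != text:
            path.write_text(updated)
            print("updated", path)

Pinning every image to the same release commit keeps the component specs, launcher template, samples, and notebook mutually consistent, which is why the one tag is swapped everywhere in a single change.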