From 57808877ae16389bf889e9f7324e47e340959043 Mon Sep 17 00:00:00 2001
From: nealgao
Date: Tue, 27 Nov 2018 15:28:51 -0800
Subject: [PATCH] update image tag with the new release (#393)

---
 bootstrapper.yaml | 2 +-
 .../launcher/kubeflow_tfjob_launcher_op.py | 2 +-
 .../kubeflow/launcher/src/train.template.yaml | 6 +++---
 .../kubeflow-training-classification.py | 8 ++++----
 ...eFlow Pipeline Using TFX OSS Components.ipynb | 14 +++++++-------
 .../Lightweight Python components - basics.ipynb | 4 ++--
 samples/tfx/taxi-cab-classification-pipeline.py | 12 ++++++------
 samples/xgboost-spark/xgboost-training-cm.py | 16 ++++++++--------
 uninstaller.yaml | 2 +-
 9 files changed, 33 insertions(+), 33 deletions(-)

diff --git a/bootstrapper.yaml b/bootstrapper.yaml
index c07c46c75c4..28b46df8edf 100644
--- a/bootstrapper.yaml
+++ b/bootstrapper.yaml
@@ -65,7 +65,7 @@ spec:
     spec:
       containers:
       - name: deploy
-        image: gcr.io/ml-pipeline/bootstrapper:0.1.2 #TODO-release: update the release tag for the next release
+        image: gcr.io/ml-pipeline/bootstrapper:0.1.3-rc.2 #TODO-release: update the release tag for the next release
         imagePullPolicy: 'Always'
         # Additional parameter available:
         args: [
diff --git a/components/kubeflow/launcher/kubeflow_tfjob_launcher_op.py b/components/kubeflow/launcher/kubeflow_tfjob_launcher_op.py
index 778f33ba95c..42edda93eb3 100644
--- a/components/kubeflow/launcher/kubeflow_tfjob_launcher_op.py
+++ b/components/kubeflow/launcher/kubeflow_tfjob_launcher_op.py
@@ -17,7 +17,7 @@
 def kubeflow_tfjob_launcher_op(container_image, command, number_of_workers: int, number_of_parameter_servers: int, tfjob_timeout_minutes: int, output_dir=None, step_name='TFJob-launcher'):
   return dsl.ContainerOp(
     name = step_name,
-    image = 'gcr.io/ml-pipeline/ml-pipeline-kubeflow-tf:0.1.0',#TODO-release: update the release tag for the next release
+    image = 'gcr.io/ml-pipeline/ml-pipeline-kubeflow-tf:0.1.3-rc.2',#TODO-release: update the release tag for the next release
     arguments = [
       '--workers', number_of_workers,
       '--pss', number_of_parameter_servers,
diff --git a/components/kubeflow/launcher/src/train.template.yaml b/components/kubeflow/launcher/src/train.template.yaml
index 788a7b8bcbd..aed2b90a2b1 100644
--- a/components/kubeflow/launcher/src/train.template.yaml
+++ b/components/kubeflow/launcher/src/train.template.yaml
@@ -26,7 +26,7 @@ spec:
         spec:
           containers:
           - name: tensorflow
-            image: gcr.io/ml-pipeline/ml-pipeline-kubeflow-tf-trainer:0.1.0 #TODO-release: update the release tag for the next release
+            image: gcr.io/ml-pipeline/ml-pipeline-kubeflow-tf-trainer:0.1.3-rc.2 #TODO-release: update the release tag for the next release
             command:
             - python
            - -m
@@ -38,7 +38,7 @@ spec:
         spec:
           containers:
           - name: tensorflow
-            image: gcr.io/ml-pipeline/ml-pipeline-kubeflow-tf-trainer:0.1.0 #TODO-release: update the release tag for the next release
+            image: gcr.io/ml-pipeline/ml-pipeline-kubeflow-tf-trainer:0.1.3-rc.2 #TODO-release: update the release tag for the next release
             command:
             - python
            - -m
@@ -50,7 +50,7 @@ spec:
         spec:
          containers:
          - name: tensorflow
-            image: gcr.io/ml-pipeline/ml-pipeline-kubeflow-tf-trainer:0.1.0 #TODO-release: update the release tag for the next release
+            image: gcr.io/ml-pipeline/ml-pipeline-kubeflow-tf-trainer:0.1.3-rc.2 #TODO-release: update the release tag for the next release
             command:
             - python
            - -m
diff --git a/samples/kubeflow-tf/kubeflow-training-classification.py b/samples/kubeflow-tf/kubeflow-training-classification.py
index aaa3241b899..43911e52a95 100755
--- a/samples/kubeflow-tf/kubeflow-training-classification.py
+++ b/samples/kubeflow-tf/kubeflow-training-classification.py
@@ -20,7 +20,7 @@
 def dataflow_tf_transform_op(train_data: 'GcsUri', evaluation_data: 'GcsUri', schema: 'GcsUri[text/json]', project: 'GcpProject', preprocess_mode, preprocess_module: 'GcsUri[text/code/python]', transform_output: 'GcsUri[Directory]', step_name='preprocess'):
     return dsl.ContainerOp(
         name = step_name,
-        image = 'gcr.io/ml-pipeline/ml-pipeline-dataflow-tft:0.1.0', #TODO-release: update the release tag for the next release
+        image = 'gcr.io/ml-pipeline/ml-pipeline-dataflow-tft:0.1.3-rc.2', #TODO-release: update the release tag for the next release
         arguments = [
             '--train', train_data,
             '--eval', evaluation_data,
@@ -37,7 +37,7 @@ def dataflow_tf_transform_op(train_data: 'GcsUri', evaluation_data: 'GcsUri', sc
 def kubeflow_tf_training_op(transformed_data_dir, schema: 'GcsUri[text/json]', learning_rate: float, hidden_layer_size: int, steps: int, target, preprocess_module: 'GcsUri[text/code/python]', training_output: 'GcsUri[Directory]', step_name='training'):
     return dsl.ContainerOp(
         name = step_name,
-        image = 'gcr.io/ml-pipeline/ml-pipeline-kubeflow-tf-trainer:0.1.0', #TODO-release: update the release tag for the next release
+        image = 'gcr.io/ml-pipeline/ml-pipeline-kubeflow-tf-trainer:0.1.3-rc.2', #TODO-release: update the release tag for the next release
         arguments = [
             '--transformed-data-dir', transformed_data_dir,
             '--schema', schema,
@@ -54,7 +54,7 @@ def kubeflow_tf_training_op(transformed_data_dir, schema: 'GcsUri[text/json]', l
 def dataflow_tf_predict_op(evaluation_data: 'GcsUri', schema: 'GcsUri[text/json]', target: str, model: 'TensorFlow model', predict_mode, project: 'GcpProject', prediction_output: 'GcsUri', step_name='prediction'):
     return dsl.ContainerOp(
         name = step_name,
-        image = 'gcr.io/ml-pipeline/ml-pipeline-dataflow-tf-predict:0.1.0', #TODO-release: update the release tag for the next release
+        image = 'gcr.io/ml-pipeline/ml-pipeline-dataflow-tf-predict:0.1.3-rc.2', #TODO-release: update the release tag for the next release
         arguments = [
             '--data', evaluation_data,
             '--schema', schema,
@@ -70,7 +70,7 @@ def dataflow_tf_predict_op(evaluation_data: 'GcsUri', schema: 'GcsUri[text/json]
 def confusion_matrix_op(predictions, output, step_name='confusionmatrix'):
     return dsl.ContainerOp(
         name = step_name,
-        image = 'gcr.io/ml-pipeline/ml-pipeline-local-confusion-matrix:0.1.0', #TODO-release: update the release tag for the next release
+        image = 'gcr.io/ml-pipeline/ml-pipeline-local-confusion-matrix:0.1.3-rc.2', #TODO-release: update the release tag for the next release
         arguments = [
             '--predictions', predictions,
             '--output', output,
diff --git a/samples/notebooks/KubeFlow Pipeline Using TFX OSS Components.ipynb b/samples/notebooks/KubeFlow Pipeline Using TFX OSS Components.ipynb
index d7302bfbcec..a2619037208 100644
--- a/samples/notebooks/KubeFlow Pipeline Using TFX OSS Components.ipynb
+++ b/samples/notebooks/KubeFlow Pipeline Using TFX OSS Components.ipynb
@@ -208,7 +208,7 @@
     "def dataflow_tf_data_validation_op(inference_data: 'GcsUri', validation_data: 'GcsUri', column_names: 'GcsUri[text/json]', key_columns, project: 'GcpProject', mode, validation_output: 'GcsUri[Directory]', step_name='validation'):\n",
     "    return dsl.ContainerOp(\n",
     "        name = step_name,\n",
-    "        image = 'gcr.io/ml-pipeline/ml-pipeline-dataflow-tfdv:dev',\n",
+    "        image = 'gcr.io/ml-pipeline/ml-pipeline-dataflow-tfdv:0.1.3-rc.2', #TODO-release: update the release tag for the next release\n",
     "        arguments = [\n",
     "            '--csv-data-for-inference', inference_data,\n",
     "            '--csv-data-to-validate', validation_data,\n",
@@ -227,7 +227,7 @@
     "def dataflow_tf_transform_op(train_data: 'GcsUri', evaluation_data: 'GcsUri', schema: 'GcsUri[text/json]', project: 'GcpProject', preprocess_mode, preprocess_module: 'GcsUri[text/code/python]', transform_output: 'GcsUri[Directory]', step_name='preprocess'):\n",
     "    return dsl.ContainerOp(\n",
     "        name = step_name,\n",
-    "        image = 'gcr.io/ml-pipeline/ml-pipeline-dataflow-tft:0.0.42',\n",
+    "        image = 'gcr.io/ml-pipeline/ml-pipeline-dataflow-tft:0.1.3-rc.2', #TODO-release: update the release tag for the next release\n",
     "        arguments = [\n",
     "            '--train', train_data,\n",
     "            '--eval', evaluation_data,\n",
@@ -244,7 +244,7 @@
     "def tf_train_op(transformed_data_dir, schema: 'GcsUri[text/json]', learning_rate: float, hidden_layer_size: int, steps: int, target: str, preprocess_module: 'GcsUri[text/code/python]', training_output: 'GcsUri[Directory]', step_name='training'):\n",
     "    return dsl.ContainerOp(\n",
     "        name = step_name,\n",
-    "        image = 'gcr.io/ml-pipeline/ml-pipeline-kubeflow-tf-trainer:0.0.42',\n",
+    "        image = 'gcr.io/ml-pipeline/ml-pipeline-kubeflow-tf-trainer:0.1.3-rc.2', #TODO-release: update the release tag for the next release\n",
     "        arguments = [\n",
     "            '--transformed-data-dir', transformed_data_dir,\n",
     "            '--schema', schema,\n",
@@ -261,7 +261,7 @@
     "def dataflow_tf_model_analyze_op(model: 'TensorFlow model', evaluation_data: 'GcsUri', schema: 'GcsUri[text/json]', project: 'GcpProject', analyze_mode, analyze_slice_column, analysis_output: 'GcsUri', step_name='analysis'):\n",
     "    return dsl.ContainerOp(\n",
     "        name = step_name,\n",
-    "        image = 'gcr.io/ml-pipeline/ml-pipeline-dataflow-tfma:0.0.42',\n",
+    "        image = 'gcr.io/ml-pipeline/ml-pipeline-dataflow-tfma:0.1.3-rc.2', #TODO-release: update the release tag for the next release\n",
     "        arguments = [\n",
     "            '--model', model,\n",
     "            '--eval', evaluation_data,\n",
@@ -278,7 +278,7 @@
     "def dataflow_tf_predict_op(evaluation_data: 'GcsUri', schema: 'GcsUri[text/json]', target: str, model: 'TensorFlow model', predict_mode, project: 'GcpProject', prediction_output: 'GcsUri', step_name='prediction'):\n",
     "    return dsl.ContainerOp(\n",
     "        name = step_name,\n",
-    "        image = 'gcr.io/ml-pipeline/ml-pipeline-dataflow-tf-predict:0.0.42',\n",
+    "        image = 'gcr.io/ml-pipeline/ml-pipeline-dataflow-tf-predict:0.1.3-rc.2', #TODO-release: update the release tag for the next release\n",
     "        arguments = [\n",
     "            '--data', evaluation_data,\n",
     "            '--schema', schema,\n",
@@ -294,7 +294,7 @@
     "def kubeflow_deploy_op(model: 'TensorFlow model', tf_server_name, step_name='deploy'):\n",
     "    return dsl.ContainerOp(\n",
     "        name = step_name,\n",
-    "        image = 'gcr.io/ml-pipeline/ml-pipeline-kubeflow-deployer:dev',\n",
+    "        image = 'gcr.io/ml-pipeline/ml-pipeline-kubeflow-deployer:0.1.3-rc.2', #TODO-release: update the release tag for the next release\n",
     "        arguments = [\n",
     "            '--model-path', model,\n",
     "            '--server-name', tf_server_name\n"
    ],
@@ -792,7 +792,7 @@
    "name": "python",
    "nbconvert_exporter": "python",
    "pygments_lexer": "ipython3",
-   "version": "3.6.4"
+   "version": "3.5.3"
   }
  },
  "nbformat": 4,
diff --git a/samples/notebooks/Lightweight Python components - basics.ipynb b/samples/notebooks/Lightweight Python components - basics.ipynb
index 0905ffa1918..3cda00de385 100644
--- a/samples/notebooks/Lightweight Python components - basics.ipynb
+++ b/samples/notebooks/Lightweight Python components - basics.ipynb
@@ -26,7 +26,7 @@
    "outputs": [],
    "source": [
     "#Install the SDK\n",
-    "!pip3 install https://storage.googleapis.com/ml-pipeline/release/0.1.1/kfp.tar.gz --upgrade\n"
+    "!pip3 install https://storage.googleapis.com/ml-pipeline/release/0.1.3-rc.2/kfp.tar.gz --upgrade\n"
    ]
   },
   {
@@ -268,7 +268,7 @@
    "name": "python",
    "nbconvert_exporter": "python",
    "pygments_lexer": "ipython3",
-   "version": "3.6.4"
+   "version": "3.5.3"
   }
  },
  "nbformat": 4,
diff --git a/samples/tfx/taxi-cab-classification-pipeline.py b/samples/tfx/taxi-cab-classification-pipeline.py
index be83eb1bda3..3c219b688d1 100755
--- a/samples/tfx/taxi-cab-classification-pipeline.py
+++ b/samples/tfx/taxi-cab-classification-pipeline.py
@@ -20,7 +20,7 @@
 def dataflow_tf_data_validation_op(inference_data: 'GcsUri', validation_data: 'GcsUri', column_names: 'GcsUri[text/json]', key_columns, project: 'GcpProject', mode, validation_output: 'GcsUri[Directory]', step_name='validation'):
     return dsl.ContainerOp(
         name = step_name,
-        image = 'gcr.io/ml-pipeline/ml-pipeline-dataflow-tfdv:0.1.0', #TODO-release: update the release tag for the next release
+        image = 'gcr.io/ml-pipeline/ml-pipeline-dataflow-tfdv:0.1.3-rc.2', #TODO-release: update the release tag for the next release
         arguments = [
             '--csv-data-for-inference', inference_data,
             '--csv-data-to-validate', validation_data,
@@ -39,7 +39,7 @@ def dataflow_tf_data_validation_op(inference_data: 'GcsUri', validation_data: 'G
 def dataflow_tf_transform_op(train_data: 'GcsUri', evaluation_data: 'GcsUri', schema: 'GcsUri[text/json]', project: 'GcpProject', preprocess_mode, preprocess_module: 'GcsUri[text/code/python]', transform_output: 'GcsUri[Directory]', step_name='preprocess'):
     return dsl.ContainerOp(
         name = step_name,
-        image = 'gcr.io/ml-pipeline/ml-pipeline-dataflow-tft:0.1.0', #TODO-release: update the release tag for the next release
+        image = 'gcr.io/ml-pipeline/ml-pipeline-dataflow-tft:0.1.3-rc.2', #TODO-release: update the release tag for the next release
         arguments = [
             '--train', train_data,
             '--eval', evaluation_data,
@@ -56,7 +56,7 @@ def dataflow_tf_transform_op(train_data: 'GcsUri', evaluation_data: 'GcsUri', sc
 def tf_train_op(transformed_data_dir, schema: 'GcsUri[text/json]', learning_rate: float, hidden_layer_size: int, steps: int, target: str, preprocess_module: 'GcsUri[text/code/python]', training_output: 'GcsUri[Directory]', step_name='training'):
     return dsl.ContainerOp(
         name = step_name,
-        image = 'gcr.io/ml-pipeline/ml-pipeline-kubeflow-tf-trainer:0.1.0', #TODO-release: update the release tag for the next release
+        image = 'gcr.io/ml-pipeline/ml-pipeline-kubeflow-tf-trainer:0.1.3-rc.2', #TODO-release: update the release tag for the next release
         arguments = [
             '--transformed-data-dir', transformed_data_dir,
             '--schema', schema,
@@ -73,7 +73,7 @@ def tf_train_op(transformed_data_dir, schema: 'GcsUri[text/json]', learning_rate
 def dataflow_tf_model_analyze_op(model: 'TensorFlow model', evaluation_data: 'GcsUri', schema: 'GcsUri[text/json]', project: 'GcpProject', analyze_mode, analyze_slice_column, analysis_output: 'GcsUri', step_name='analysis'):
     return dsl.ContainerOp(
         name = step_name,
-        image = 'gcr.io/ml-pipeline/ml-pipeline-dataflow-tfma:0.1.0', #TODO-release: update the release tag for the next release
+        image = 'gcr.io/ml-pipeline/ml-pipeline-dataflow-tfma:0.1.3-rc.2', #TODO-release: update the release tag for the next release
         arguments = [
             '--model', model,
             '--eval', evaluation_data,
@@ -90,7 +90,7 @@ def dataflow_tf_model_analyze_op(model: 'TensorFlow model', evaluation_data: 'Gc
 def dataflow_tf_predict_op(evaluation_data: 'GcsUri', schema: 'GcsUri[text/json]', target: str, model: 'TensorFlow model', predict_mode, project: 'GcpProject', prediction_output: 'GcsUri', step_name='prediction'):
     return dsl.ContainerOp(
         name = step_name,
-        image = 'gcr.io/ml-pipeline/ml-pipeline-dataflow-tf-predict:0.1.0', #TODO-release: update the release tag for the next release
+        image = 'gcr.io/ml-pipeline/ml-pipeline-dataflow-tf-predict:0.1.3-rc.2', #TODO-release: update the release tag for the next release
         arguments = [
             '--data', evaluation_data,
             '--schema', schema,
@@ -106,7 +106,7 @@ def dataflow_tf_predict_op(evaluation_data: 'GcsUri', schema: 'GcsUri[text/json]
 def kubeflow_deploy_op(model: 'TensorFlow model', tf_server_name, step_name='deploy'):
     return dsl.ContainerOp(
         name = step_name,
-        image = 'gcr.io/ml-pipeline/ml-pipeline-kubeflow-deployer:0.1.0', #TODO-release: update the release tag for the next release
+        image = 'gcr.io/ml-pipeline/ml-pipeline-kubeflow-deployer:0.1.3-rc.2', #TODO-release: update the release tag for the next release
         arguments = [
             '--model-path', model,
             '--server-name', tf_server_name
diff --git a/samples/xgboost-spark/xgboost-training-cm.py b/samples/xgboost-spark/xgboost-training-cm.py
index 0f85683b4c3..8a1ed3ba448 100755
--- a/samples/xgboost-spark/xgboost-training-cm.py
+++ b/samples/xgboost-spark/xgboost-training-cm.py
@@ -25,7 +25,7 @@ class CreateClusterOp(dsl.ContainerOp):
   def __init__(self, name, project, region, staging):
     super(CreateClusterOp, self).__init__(
       name=name,
-      image='gcr.io/ml-pipeline/ml-pipeline-dataproc-create-cluster:0.1.0', #TODO-release: update the release tag for the next release
+      image='gcr.io/ml-pipeline/ml-pipeline-dataproc-create-cluster:0.1.3-rc.2', #TODO-release: update the release tag for the next release
      arguments=[
        '--project', project,
        '--region', region,
@@ -40,7 +40,7 @@ class DeleteClusterOp(dsl.ContainerOp):
   def __init__(self, name, project, region):
     super(DeleteClusterOp, self).__init__(
       name=name,
-      image='gcr.io/ml-pipeline/ml-pipeline-dataproc-delete-cluster:0.1.0', #TODO-release: update the release tag for the next release
+      image='gcr.io/ml-pipeline/ml-pipeline-dataproc-delete-cluster:0.1.3-rc.2', #TODO-release: update the release tag for the next release
      arguments=[
        '--project', project,
        '--region', region,
@@ -54,7 +54,7 @@ class AnalyzeOp(dsl.ContainerOp):
   def __init__(self, name, project, region, cluster_name, schema, train_data, output):
     super(AnalyzeOp, self).__init__(
       name=name,
-      image='gcr.io/ml-pipeline/ml-pipeline-dataproc-analyze:0.1.0', #TODO-release: update the release tag for the next release
+      image='gcr.io/ml-pipeline/ml-pipeline-dataproc-analyze:0.1.3-rc.2', #TODO-release: update the release tag for the next release
      arguments=[
        '--project', project,
        '--region', region,
@@ -72,7 +72,7 @@ def __init__(self, name, project, region, cluster_name, train_data, eval_data,
                target, analysis, output):
     super(TransformOp, self).__init__(
       name=name,
-      image='gcr.io/ml-pipeline/ml-pipeline-dataproc-transform:0.1.0', #TODO-release: update the release tag for the next release
+      image='gcr.io/ml-pipeline/ml-pipeline-dataproc-transform:0.1.3-rc.2', #TODO-release: update the release tag for the next release
      arguments=[
        '--project', project,
        '--region', region,
@@ -97,7 +97,7 @@ def __init__(self, name, project, region, cluster_name, train_data, eval_data,
     super(TrainerOp, self).__init__(
       name=name,
-      image='gcr.io/ml-pipeline/ml-pipeline-dataproc-train:0.1.0', #TODO-release: update the release tag for the next release
+      image='gcr.io/ml-pipeline/ml-pipeline-dataproc-train:0.1.3-rc.2', #TODO-release: update the release tag for the next release
      arguments=[
        '--project', project,
        '--region', region,
@@ -120,7 +120,7 @@ class PredictOp(dsl.ContainerOp):
   def __init__(self, name, project, region, cluster_name, data, model, target, analysis, output):
     super(PredictOp, self).__init__(
       name=name,
-      image='gcr.io/ml-pipeline/ml-pipeline-dataproc-predict:0.1.0', #TODO-release: update the release tag for the next release
+      image='gcr.io/ml-pipeline/ml-pipeline-dataproc-predict:0.1.3-rc.2', #TODO-release: update the release tag for the next release
      arguments=[
        '--project', project,
        '--region', region,
@@ -140,7 +140,7 @@ class ConfusionMatrixOp(dsl.ContainerOp):
   def __init__(self, name, predictions, output):
     super(ConfusionMatrixOp, self).__init__(
       name=name,
-      image='gcr.io/ml-pipeline/ml-pipeline-local-confusion-matrix:0.1.0', #TODO-release: update the release tag for the next release
+      image='gcr.io/ml-pipeline/ml-pipeline-local-confusion-matrix:0.1.3-rc.2', #TODO-release: update the release tag for the next release
      arguments=[
        '--output', output,
        '--predictions', predictions
@@ -152,7 +152,7 @@ class RocOp(dsl.ContainerOp):
   def __init__(self, name, predictions, trueclass, output):
     super(RocOp, self).__init__(
       name=name,
-      image='gcr.io/ml-pipeline/ml-pipeline-local-roc:0.1.0', #TODO-release: update the release tag for the next release
+      image='gcr.io/ml-pipeline/ml-pipeline-local-roc:0.1.3-rc.2', #TODO-release: update the release tag for the next release
      arguments=[
        '--output', output,
        '--predictions', predictions,
diff --git a/uninstaller.yaml b/uninstaller.yaml
index 98b0ceb3e59..dea3e53b588 100644
--- a/uninstaller.yaml
+++ b/uninstaller.yaml
@@ -24,7 +24,7 @@ spec:
     spec:
       containers:
       - name: uninstaller
-        image: gcr.io/ml-pipeline/bootstrapper:0.1.0 #TODO-release: update the release tag for the next release
+        image: gcr.io/ml-pipeline/bootstrapper:0.1.3-rc.2 #TODO-release: update the release tag for the next release
         imagePullPolicy: 'Always'
         # Additional parameter available:
         args: [
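For context (illustrative only, not part of the patch): the factories being retagged above are plain dsl.ContainerOp wrappers, and a pinned image tag only takes effect once such a factory is composed into a pipeline and compiled with the matching kfp SDK release referenced in the notebook change. Below is a minimal sketch under those assumptions; the pipeline name, GCS paths, and output package name are hypothetical placeholders.

import kfp.dsl as dsl
import kfp.compiler as compiler


def confusion_matrix_op(predictions, output, step_name='confusionmatrix'):
    # Same shape as the factory in samples/kubeflow-tf, with the image tag
    # pinned to the release being cut in this patch.
    return dsl.ContainerOp(
        name=step_name,
        image='gcr.io/ml-pipeline/ml-pipeline-local-confusion-matrix:0.1.3-rc.2',
        arguments=[
            '--predictions', predictions,
            '--output', output,
        ])


@dsl.pipeline(name='confusion-matrix-demo', description='Hypothetical usage example.')
def demo_pipeline(predictions='gs://my-bucket/predictions', output='gs://my-bucket/cm'):
    # Default string arguments become pipeline parameters at compile time.
    confusion_matrix_op(predictions, output)


if __name__ == '__main__':
    # Produces a package that can be uploaded to the Kubeflow Pipelines UI.
    compiler.Compiler().compile(demo_pipeline, 'confusion-matrix-demo.tar.gz')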