diff --git a/README.md b/README.md index 9424c33be63..a0a16d43cb3 100644 --- a/README.md +++ b/README.md @@ -1,4 +1,4 @@ -[![Build Status](https://travis-ci.com/googleprivate/ml.svg?token=JjfzFsYGxZwkHvXFCpwt&branch=master)](https://travis-ci.com/googleprivate/ml) +[![Build Status](https://travis-ci.com/kubeflow/pipelines.svg?token=JjfzFsYGxZwkHvXFCpwt&branch=master)](https://travis-ci.com/kubeflow/pipelines) # ML Pipeline Services - Overview @@ -112,14 +112,14 @@ gcloud container clusters create $CLUSTER_NAME \ ``` Here we choose the cloud-platform scope so the cluster can invoke GCP APIs. You can find all the options for creating a cluster in [here](https://cloud.google.com/sdk/gcloud/reference/container/clusters/create). -Next, grant your user account permission to create new cluster roles. This step is necessary because installing ML Pipelines Services inlcudes installing a few [clusterroles](https://github.com/googleprivate/ml/search?utf8=%E2%9C%93&q=clusterrole+path%3Aml-pipeline%2Fml-pipeline&type=). +Next, grant your user account permission to create new cluster roles. This step is necessary because installing ML Pipelines Services includes installing a few [clusterroles](https://github.com/kubeflow/pipelines/search?utf8=%E2%9C%93&q=clusterrole+path%3Aml-pipeline%2Fml-pipeline&type=). ```bash kubectl create clusterrolebinding ml-pipeline-admin-binding --clusterrole=cluster-admin --user=$(gcloud config get-value account) ``` ## Deploy ML Pipeline Services and Kubeflow -Go to [release page](https://github.com/googleprivate/ml/releases) to find a version of ML Pipeline Services. Deploy the ML Pipeline Services and Kubeflow to your cluster. +Go to the [release page](https://github.com/kubeflow/pipelines/releases) to find a version of ML Pipeline Services. Deploy the ML Pipeline Services and Kubeflow to your cluster. For example: ```bash @@ -166,7 +166,7 @@ If you are using Cloud Shell, you could view the UI by open the [web preview](ht If you are using local console instead of Cloud Shell, you can access the ML pipeline UI at [localhost:8080/pipeline](http://localhost:8080/pipeline). ## Run your first TFJob pipeline -See the following authoring guide on how to compile your python pipeline code into workflow tar file. Then, follow [README.md](https://github.com/googleprivate/ml/blob/master/samples/kubeflow-tf/README.md) to deploy your first TFJob pipeline. +See the following authoring guide on how to compile your Python pipeline code into a workflow tar file. Then, follow [README.md](https://github.com/kubeflow/pipelines/blob/master/samples/kubeflow-tf/README.md) to deploy your first TFJob pipeline. ## Uninstall To uninstall ML pipeline, download the bootstrapper file and change the arguments to the deployment job. @@ -189,7 +189,7 @@ then create job using the updated YAML by running ```kubectl create -f bootstrap # ML Pipeline Services - Authoring Guideline -For more details, see [README.md](https://github.com/googleprivate/ml/blob/master/samples/README.md). +For more details, see [README.md](https://github.com/kubeflow/pipelines/blob/master/samples/README.md). ## Setup * Create a python3 environment. diff --git a/backend/Dockerfile b/backend/Dockerfile index 92b60445140..30b559c3af3 100644 --- a/backend/Dockerfile +++ b/backend/Dockerfile @@ -7,7 +7,7 @@ RUN curl -fsSL -o /bin/dep https://github.com/golang/dep/releases/download/v0.5.
ADD https://github.com/golang/dep/releases/download/v0.5.0/dep-linux-amd64 /usr/bin/dep RUN chmod +x /usr/bin/dep -WORKDIR /go/src/github.com/googleprivate/ml +WORKDIR /go/src/github.com/kubeflow/pipelines COPY . . # Needed for github.com/mattn/go-sqlite3 @@ -22,7 +22,7 @@ ENV COMMIT_SHA=${COMMIT_SHA} WORKDIR /bin COPY --from=builder /bin/apiserver /bin/apiserver -COPY --from=builder /go/src/github.com/googleprivate/ml/third_party/license.txt /bin/license.txt +COPY --from=builder /go/src/github.com/kubeflow/pipelines/third_party/license.txt /bin/license.txt COPY backend/src/apiserver/config/ /config COPY backend/src/apiserver/samples/ /samples diff --git a/backend/Dockerfile.persistenceagent b/backend/Dockerfile.persistenceagent index d0aede20719..c58f6337540 100644 --- a/backend/Dockerfile.persistenceagent +++ b/backend/Dockerfile.persistenceagent @@ -8,7 +8,7 @@ RUN curl -fsSL -o /bin/dep https://github.com/golang/dep/releases/download/v0.5. ADD https://github.com/golang/dep/releases/download/v0.5.0/dep-linux-amd64 /usr/bin/dep RUN chmod +x /usr/bin/dep -WORKDIR /go/src/github.com/googleprivate/ml +WORKDIR /go/src/github.com/kubeflow/pipelines COPY . . # Needed for github.com/mattn/go-sqlite3 @@ -19,7 +19,7 @@ FROM alpine WORKDIR /bin COPY --from=builder /bin/persistence_agent /bin/persistence_agent -COPY --from=builder /go/src/github.com/googleprivate/ml/third_party/license.txt /bin/license.txt +COPY --from=builder /go/src/github.com/kubeflow/pipelines/third_party/license.txt /bin/license.txt RUN chmod +x /bin/persistence_agent CMD persistence_agent \ diff --git a/backend/Dockerfile.scheduledworkflow b/backend/Dockerfile.scheduledworkflow index 4c2b83272e1..0fb7392cb85 100644 --- a/backend/Dockerfile.scheduledworkflow +++ b/backend/Dockerfile.scheduledworkflow @@ -7,7 +7,7 @@ RUN curl -fsSL -o /bin/dep https://github.com/golang/dep/releases/download/v0.5. ADD https://github.com/golang/dep/releases/download/v0.5.0/dep-linux-amd64 /usr/bin/dep RUN chmod +x /usr/bin/dep -WORKDIR /go/src/github.com/googleprivate/ml +WORKDIR /go/src/github.com/kubeflow/pipelines COPY . . # Needed for github.com/mattn/go-sqlite3 @@ -18,7 +18,7 @@ FROM alpine WORKDIR /bin COPY --from=builder /bin/controller /bin/controller -COPY --from=builder /go/src/github.com/googleprivate/ml/third_party/license.txt /bin/license.txt +COPY --from=builder /go/src/github.com/kubeflow/pipelines/third_party/license.txt /bin/license.txt RUN chmod +x /bin/controller CMD /bin/controller -alsologtostderr=true diff --git a/backend/src/crd/hack/update-codegen.sh b/backend/src/crd/hack/update-codegen.sh index fdd671b4751..0e0d0679254 100755 --- a/backend/src/crd/hack/update-codegen.sh +++ b/backend/src/crd/hack/update-codegen.sh @@ -27,6 +27,6 @@ CODEGEN_PKG=${SCRIPT_ROOT}/../../../../../../k8s.io/code-generator echo "CODEGEN_PKG is $CODEGEN_PKG" ${CODEGEN_PKG}/generate-groups.sh "deepcopy,client,informer,lister" \ - github.com/googleprivate/ml/backend/src/crd/pkg/client github.com/googleprivate/ml/backend/src/crd/pkg/apis \ + github.com/kubeflow/pipelines/backend/src/crd/pkg/client github.com/kubeflow/pipelines/backend/src/crd/pkg/apis \ scheduledworkflow:v1alpha1 \ --go-header-file ${SCRIPT_ROOT}/hack/custom-boilerplate.go.txt \ No newline at end of file diff --git a/developer_guide.md b/developer_guide.md index 41def2d2166..03bf860f257 100644 --- a/developer_guide.md +++ b/developer_guide.md @@ -1,6 +1,6 @@ # ML Pipeline Development Guideline -This document describes the development guideline to contribute to ML pipeline project. 
Please check the [main page](https://github.com/googleprivate/ml/blob/master/README.md) for instruction on how to deploy a ML pipeline system. +This document describes the development guidelines for contributing to the ML pipeline project. Please check the [main page](https://github.com/kubeflow/pipelines/blob/master/README.md) for instructions on how to deploy an ML pipeline system. ## ML pipeline deployment @@ -17,9 +17,9 @@ The docker container accepts various parameters to customize your deployment. - **--report_usage** whether to report usage for the deployment - **--uninstall** to uninstall everything. -See [bootstrapper.yaml](https://github.com/googleprivate/ml/blob/master/bootstrapper.yaml) for examples on how to pass in parameter. +See [bootstrapper.yaml](https://github.com/kubeflow/pipelines/blob/master/bootstrapper.yaml) for examples of how to pass in parameters. -Alternatively, you can use [deploy.sh](https://github.com/googleprivate/ml/blob/master/ml-pipeline/deploy.sh) if you want to interact with Ksonnet directly. +Alternatively, you can use [deploy.sh](https://github.com/kubeflow/pipelines/blob/master/ml-pipeline/deploy.sh) if you want to interact with Ksonnet directly. To deploy, run the script locally. ```bash $ ml-pipeline/deploy.sh @@ -101,7 +101,7 @@ pip install ./dsl-compiler/ --upgrade && python ./dsl-compiler/tests/main.py ## Integration test ### API server -Check [this](https://github.com/googleprivate/ml/blob/master/test/apiserver/README.md) page for more details. +Check [this](https://github.com/kubeflow/pipelines/blob/master/test/apiserver/README.md) page for more details. ## E2E test TODO: Add instruction diff --git a/frontend/server/package.json b/frontend/server/package.json index 464a5e0905c..325b683bd97 100644 --- a/frontend/server/package.json +++ b/frontend/server/package.json @@ -22,12 +22,12 @@ }, "repository": { "type": "git", - "url": "git+https://github.com/googleprivate/ml.git" + "url": "git+https://github.com/kubeflow/pipelines.git" }, "author": "", "license": "", "bugs": { - "url": "https://github.com/googleprivate/ml/issues" + "url": "https://github.com/kubeflow/pipelines/issues" }, - "homepage": "https://github.com/googleprivate/ml#readme" + "homepage": "https://github.com/kubeflow/pipelines#readme" } diff --git a/ml-pipeline/deploy.sh b/ml-pipeline/deploy.sh index 73d934df553..0b1cdf68627 100755 --- a/ml-pipeline/deploy.sh +++ b/ml-pipeline/deploy.sh @@ -120,7 +120,7 @@ echo "Initialized ksonnet APP completed successfully" # an known issue: https://github.com/ksonnet/ksonnet/issues/232, we are working around by creating # a symbolic links in ./vendor and manually modifying app.yaml # when the repo is public we can do following: -# ks registry add ml-pipeline github.com/googleprivate/ml/tree/master/ml-pipeline +# ks registry add ml-pipeline github.com/kubeflow/pipelines/tree/master/ml-pipeline # ks pkg install ml-pipeline/ml-pipeline BASEDIR=$(cd $(dirname "$0") && pwd) ln -s ${BASEDIR} ${APP_DIR}/vendor/ml-pipeline diff --git a/ml-pipeline/ml-pipeline/parts.yaml b/ml-pipeline/ml-pipeline/parts.yaml index f9682e96506..6fbc233fdad 100644 --- a/ml-pipeline/ml-pipeline/parts.yaml +++ b/ml-pipeline/ml-pipeline/parts.yaml @@ -26,10 +26,10 @@ ], "repository": { "type": "git", - "url": "https://github.com/googleprivate/ml" + "url": "https://github.com/kubeflow/pipelines" }, "bugs": { - "url": "https://github.com/googleprivate/ml/issues" + "url": "https://github.com/kubeflow/pipelines/issues" }, "keywords": [ "ml-pipeline" diff --git a/samples/README.md
b/samples/README.md index e7a69b8bb1d..5c1a05b3c3c 100644 --- a/samples/README.md +++ b/samples/README.md @@ -71,7 +71,7 @@ Install [docker](https://www.docker.com/get-docker). ### Step One: Create A Container For Each Component In most cases, you need to create your own container image that includes your program. You can find container -building examples from [here](https://github.com/googleprivate/ml/blob/master/components)(in the directory, go to any subdirectory and then go to “containers” directory). +building examples [here](https://github.com/kubeflow/pipelines/blob/master/components) (go to any subdirectory and then to its “containers” directory). If your component creates some outputs to be fed as inputs to the downstream components, each output has to be a string and needs to be written to a separate local text file by the container image. @@ -155,4 +155,4 @@ args go first and keyword args go next. should all be of that type. The default values will show up in the Pipeline UI but can be overwritten. -See an example [here](https://github.com/googleprivate/ml/blob/master/samples/xgboost-spark/xgboost-training-cm.py). +See an example [here](https://github.com/kubeflow/pipelines/blob/master/samples/xgboost-spark/xgboost-training-cm.py). diff --git a/samples/basic/README.md b/samples/basic/README.md index 7a45ec1fdf9..e15c84c224d 100644 --- a/samples/basic/README.md +++ b/samples/basic/README.md @@ -1,5 +1,5 @@ ## Compile -Follow [README.md](https://github.com/googleprivate/ml/blob/master/samples/README.md) to install the compiler and +Follow [README.md](https://github.com/kubeflow/pipelines/blob/master/samples/README.md) to install the compiler and compile the sample python into workflow yaml. "sequential.yaml" is pre-generated for referencing purpose. diff --git a/samples/kubeflow-tf/README.md b/samples/kubeflow-tf/README.md index 22783b3b4ae..3e2b6869713 100644 --- a/samples/kubeflow-tf/README.md +++ b/samples/kubeflow-tf/README.md @@ -12,7 +12,7 @@ Note: The trainer depends on KubeFlow API Version v1alpha2.
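As an illustration of the output convention described in samples/README.md above (each component output is a string written to its own local text file), here is a minimal, hypothetical container entrypoint sketch; the path and output value are illustrative assumptions, not taken from the repo:

```bash
#!/bin/bash
# Hypothetical component entrypoint (illustrative only; not from the repo).
set -e

# ...the component's real work would happen here...
MODEL_DIR="gs://my-bucket/model/v1"   # assumed output value

# Each output is a plain string written to its own local text file,
# which the pipeline system reads and passes to downstream components.
mkdir -p /outputs
printf '%s' "${MODEL_DIR}" > /outputs/model_dir.txt
```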
## Compiling the pipeline template -Follow [README.md](https://github.com/googleprivate/ml/blob/master/samples/README.md) to install the compiler and then run the following command to compile the pipeline: +Follow [README.md](https://github.com/kubeflow/pipelines/blob/master/samples/README.md) to install the compiler and then run the following command to compile the pipeline: ```bash dsl-compile --py kubeflow-training-classification.py --output kubeflow-training-classification.tar.gz @@ -29,17 +29,17 @@ The pipeline will require one argument: ## Components Source Preprocessing: - [source code](https://github.com/googleprivate/ml/tree/master/components/dataflow/tft), - [container](https://github.com/googleprivate/ml/tree/master/components/dataflow/containers/tft) + [source code](https://github.com/kubeflow/pipelines/tree/master/components/dataflow/tft), + [container](https://github.com/kubeflow/pipelines/tree/master/components/dataflow/containers/tft) Training: - [source code](https://github.com/googleprivate/ml/tree/master/components/kubeflow/launcher), - [container](https://github.com/googleprivate/ml/tree/master/components/kubeflow/container/launcher) + [source code](https://github.com/kubeflow/pipelines/tree/master/components/kubeflow/launcher), + [container](https://github.com/kubeflow/pipelines/tree/master/components/kubeflow/container/launcher) Prediction: - [source code](https://github.com/googleprivate/ml/tree/master/components/dataflow/predict), - [container](https://github.com/googleprivate/ml/tree/master/components/dataflow/containers/predict) + [source code](https://github.com/kubeflow/pipelines/tree/master/components/dataflow/predict), + [container](https://github.com/kubeflow/pipelines/tree/master/components/dataflow/containers/predict) Confusion Matrix: - [source code](https://github.com/googleprivate/ml/tree/master/components/local/evaluation), - [container](https://github.com/googleprivate/ml/tree/master/components/local/containers/confusion_matrix) + [source code](https://github.com/kubeflow/pipelines/tree/master/components/local/evaluation), + [container](https://github.com/kubeflow/pipelines/tree/master/components/local/containers/confusion_matrix) diff --git a/samples/resnet-cmle/README.md b/samples/resnet-cmle/README.md index 18322d98a01..2f7f583930a 100644 --- a/samples/resnet-cmle/README.md +++ b/samples/resnet-cmle/README.md @@ -8,7 +8,7 @@ Training and serving uses Google Cloud Machine Learning Engine. So [Cloud Machin given project. ## Compile -Follow [README.md](https://github.com/googleprivate/ml/blob/master/samples/README.md) to install the compiler and +Follow [README.md](https://github.com/kubeflow/pipelines/blob/master/samples/README.md) to install the compiler and compile your python sample into workflow yaml. ## Deploy @@ -25,13 +25,13 @@ bucket: A Google storage bucket to store results. 
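Training and serving in this sample depend on the Cloud Machine Learning Engine API being enabled for the project. The README points to the console-based instructions; as a sketch, the same thing can likely be done from the CLI (assuming the gcloud SDK is installed and authenticated for your project):

```bash
# Enable the Cloud ML Engine API for the currently configured project.
# Assumes gcloud is installed and authenticated.
gcloud services enable ml.googleapis.com
```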
## Components Source Preprocessing: - [source code](https://github.com/googleprivate/ml/tree/master/components/resnet-cmle/resnet) - [container](https://github.com/googleprivate/ml/tree/master/components/resnet-cmle/containers/preprocess) + [source code](https://github.com/kubeflow/pipelines/tree/master/components/resnet-cmle/resnet) + [container](https://github.com/kubeflow/pipelines/tree/master/components/resnet-cmle/containers/preprocess) Training: - [source code](https://github.com/googleprivate/ml/tree/master/components/resnet-cmle/resnet) - [container](https://github.com/googleprivate/ml/tree/master/components/resnet-cmle/containers/train) + [source code](https://github.com/kubeflow/pipelines/tree/master/components/resnet-cmle/resnet) + [container](https://github.com/kubeflow/pipelines/tree/master/components/resnet-cmle/containers/train) Deployment: - [source code](https://github.com/googleprivate/ml/tree/master/components/resnet-cmle/resnet) - [container](https://github.com/googleprivate/ml/tree/master/components/resnet-cmle/containers/deploy) + [source code](https://github.com/kubeflow/pipelines/tree/master/components/resnet-cmle/resnet) + [container](https://github.com/kubeflow/pipelines/tree/master/components/resnet-cmle/containers/deploy) diff --git a/samples/tfma/README.md b/samples/tfma/README.md index 21b414f618c..94155edc185 100644 --- a/samples/tfma/README.md +++ b/samples/tfma/README.md @@ -31,7 +31,7 @@ Instructions for enabling that can be found [here](https://cloud.google.com/endp ## Compiling the pipeline template -Follow [README.md](https://github.com/googleprivate/ml/blob/master/samples/README.md) to install the compiler and then run the following to compile the pipeline: +Follow [README.md](https://github.com/kubeflow/pipelines/blob/master/samples/README.md) to install the compiler and then run the following to compile the pipeline: ```bash dsl-compile --py taxi-cab-classification-pipeline.py --output taxi-cab-classification-pipeline.tar.gz @@ -49,17 +49,17 @@ The pipeline will require two arguments: ## Components Source Preprocessing: - [source code](https://github.com/googleprivate/ml/tree/master/components/dataflow/tft) - [container](https://github.com/googleprivate/ml/tree/master/components/dataflow/containers/tft) + [source code](https://github.com/kubeflow/pipelines/tree/master/components/dataflow/tft) + [container](https://github.com/kubeflow/pipelines/tree/master/components/dataflow/containers/tft) Training: - [source code](https://github.com/googleprivate/ml/tree/master/components/kubeflow/launcher) - [container](https://github.com/googleprivate/ml/tree/master/components/kubeflow/container/launcher) + [source code](https://github.com/kubeflow/pipelines/tree/master/components/kubeflow/launcher) + [container](https://github.com/kubeflow/pipelines/tree/master/components/kubeflow/container/launcher) Analysis: - [source code](https://github.com/googleprivate/ml/tree/master/components/dataflow/tfma) - [container](https://github.com/googleprivate/ml/tree/master/components/dataflow/containers/tfma) + [source code](https://github.com/kubeflow/pipelines/tree/master/components/dataflow/tfma) + [container](https://github.com/kubeflow/pipelines/tree/master/components/dataflow/containers/tfma) Prediction: - [source code](https://github.com/googleprivate/ml/tree/master/components/dataflow/predict) - [container](https://github.com/googleprivate/ml/tree/master/components/dataflow/containers/predict) + [source 
code](https://github.com/kubeflow/pipelines/tree/master/components/dataflow/predict) + [container](https://github.com/kubeflow/pipelines/tree/master/components/dataflow/containers/predict) diff --git a/samples/xgboost-spark/README.md b/samples/xgboost-spark/README.md index 5a385916710..54ef2b3a51c 100644 --- a/samples/xgboost-spark/README.md +++ b/samples/xgboost-spark/README.md @@ -11,7 +11,7 @@ or not. Preprocessing uses Google Cloud DataProc. So the [DataProc API](https://cloud.google.com/endpoints/docs/openapi/enable-api) needs to be enabled for the given project. ## Compile -Follow [README.md](https://github.com/googleprivate/ml/blob/master/samples/README.md) to install the compiler and +Follow [README.md](https://github.com/kubeflow/pipelines/blob/master/samples/README.md) to install the compiler and compile your sample python into workflow yaml. ## Deploy @@ -25,31 +25,31 @@ is a GCP project. ## Components Source Create Cluster: - [source code](https://github.com/googleprivate/ml/tree/master/components/dataproc/xgboost/create_cluster) - [container](https://github.com/googleprivate/ml/tree/master/components/dataproc/containers/create_cluster) + [source code](https://github.com/kubeflow/pipelines/tree/master/components/dataproc/xgboost/create_cluster) + [container](https://github.com/kubeflow/pipelines/tree/master/components/dataproc/containers/create_cluster) Analyze (step one for preprocessing): - [source code](https://github.com/googleprivate/ml/tree/master/components/dataproc/xgboost/analyze) - [container](https://github.com/googleprivate/ml/tree/master/components/dataproc/containers/analyze) + [source code](https://github.com/kubeflow/pipelines/tree/master/components/dataproc/xgboost/analyze) + [container](https://github.com/kubeflow/pipelines/tree/master/components/dataproc/containers/analyze) Transform (step two for preprocessing): - [source code](https://github.com/googleprivate/ml/tree/master/components/dataproc/xgboost/transform) - [container](https://github.com/googleprivate/ml/tree/master/components/dataproc/containers/transform) + [source code](https://github.com/kubeflow/pipelines/tree/master/components/dataproc/xgboost/transform) + [container](https://github.com/kubeflow/pipelines/tree/master/components/dataproc/containers/transform) Distributed Training: - [source code](https://github.com/googleprivate/ml/tree/master/components/dataproc/xgboost/train) - [container](https://github.com/googleprivate/ml/tree/master/components/dataproc/containers/train) + [source code](https://github.com/kubeflow/pipelines/tree/master/components/dataproc/xgboost/train) + [container](https://github.com/kubeflow/pipelines/tree/master/components/dataproc/containers/train) Distributed Predictions: - [source code](https://github.com/googleprivate/ml/tree/master/components/dataproc/xgboost/predict) - [container](https://github.com/googleprivate/ml/tree/master/components/dataproc/containers/predict) + [source code](https://github.com/kubeflow/pipelines/tree/master/components/dataproc/xgboost/predict) + [container](https://github.com/kubeflow/pipelines/tree/master/components/dataproc/containers/predict) Confusion Matrix: - [source code](https://github.com/googleprivate/ml/tree/master/components/local/evaluation) - [container](https://github.com/googleprivate/ml/tree/master/components/local/containers/confusion_matrix) + [source code](https://github.com/kubeflow/pipelines/tree/master/components/local/evaluation) + 
[container](https://github.com/kubeflow/pipelines/tree/master/components/local/containers/confusion_matrix) Delete Cluster: - [source code](https://github.com/googleprivate/ml/tree/master/components/dataproc/xgboost/delete_cluster) - [container](https://github.com/googleprivate/ml/tree/master/components/dataproc/containers/delete_cluster) + [source code](https://github.com/kubeflow/pipelines/tree/master/components/dataproc/xgboost/delete_cluster) + [container](https://github.com/kubeflow/pipelines/tree/master/components/dataproc/containers/delete_cluster) diff --git a/sdk/python/tests/components/test_components.py b/sdk/python/tests/components/test_components.py index e6dd4f2ebc6..e28082fe678 100644 --- a/sdk/python/tests/components/test_components.py +++ b/sdk/python/tests/components/test_components.py @@ -47,7 +47,7 @@ def test_load_component_from_file(self): @unittest.expectedFailure #The repo is non-public and will change soon. TODO: Update the URL and enable the test once we move to a public repo def test_load_component_from_url(self): - url = 'https://raw.githubusercontent.com/googleprivate/ml/638045974d688b473cda9f4516a2cf1d7d1e02dd/sdk/python/tests/components/test_data/python_add.component.yaml' + url = 'https://raw.githubusercontent.com/kubeflow/pipelines/638045974d688b473cda9f4516a2cf1d7d1e02dd/sdk/python/tests/components/test_data/python_add.component.yaml' import requests resp = requests.get(url) diff --git a/test/README.md b/test/README.md index 244ed638455..f21feaed547 100644 --- a/test/README.md +++ b/test/README.md @@ -25,7 +25,7 @@ You could run the tests against a specific commit. ### Setup Here are the one-time steps to prepare for your GKE testing cluster: -- Follow the [main page](https://github.com/googleprivate/ml#setup-gke) to +- Follow the [main page](https://github.com/kubeflow/pipelines#setup-gke) to create a GKE cluster. - Install [Argo](https://github.com/argoproj/argo/blob/master/demo.md#argo-v20-getting-started) in the cluster. 
If you have Argo CLI installed locally, just run diff --git a/test/api-integration-test/run_test.sh b/test/api-integration-test/run_test.sh index b86b5a0736b..a516d3709d2 100644 --- a/test/api-integration-test/run_test.sh +++ b/test/api-integration-test/run_test.sh @@ -53,7 +53,7 @@ if [ -z "$RESULTS_GCS_DIR" ]; then exit 1 fi -GITHUB_REPO=googleprivate/ml +GITHUB_REPO=kubeflow/pipelines BASE_DIR=/go/src/github.com/${GITHUB_REPO} JUNIT_TEST_RESULT=junit_ApiIntegrationTestOutput.xml TEST_DIR=backend/test diff --git a/test/backend-unit-test/run_test.sh b/test/backend-unit-test/run_test.sh index a79caab700b..83e5fb34fa5 100644 --- a/test/backend-unit-test/run_test.sh +++ b/test/backend-unit-test/run_test.sh @@ -46,7 +46,7 @@ if [ -z "$RESULTS_GCS_DIR" ]; then exit 1 fi -GITHUB_REPO=googleprivate/ml +GITHUB_REPO=kubeflow/pipelines BASE_DIR=/go/src/github.com/${GITHUB_REPO} JUNIT_TEST_RESULT=junit_BackendUnitTestOutput.xml TEST_DIR=backend/src diff --git a/test/build_image.yaml b/test/build_image.yaml index f352d78c439..81013aa81a1 100644 --- a/test/build_image.yaml +++ b/test/build_image.yaml @@ -110,7 +110,7 @@ spec: value: Dockerfile - name: image-name container: - image: gcr.io/ml-pipeline-staging/image-builder:v20181011-0.0.20-18-g5ab78f1c-a9becd + image: gcr.io/ml-pipeline-staging/image-builder:v20181102-0.0.20-195-g2c8e37ab-e3b0c4 imagePullPolicy: 'Always' args: [ "--commit_sha", "{{inputs.parameters.commit-sha}}", diff --git a/test/e2e_test_gke.yaml b/test/e2e_test_gke.yaml index 88719c947ea..ffa7907241c 100644 --- a/test/e2e_test_gke.yaml +++ b/test/e2e_test_gke.yaml @@ -108,7 +108,7 @@ spec: - name: frontend-integration-tests-image value: "gcr.io/{{steps.get-project.outputs.result}}/{{inputs.parameters.commit-sha}}/{{inputs.parameters.frontend-integration-tests-image-suffix}}" #TODO: Uncomment to disable the test on Minikube - #TODO: Change the cluster-type to "minikube" once https://github.com/googleprivate/ml/pull/1285 and related PRs are merged. + #TODO: Change the cluster-type to "minikube" once https://github.com/kubeflow/pipelines/pull/1285 and related PRs are merged. #when: "{{workflow.parameters.cluster-type}} != none" #Do not run the test on Minikube - name: get-project @@ -133,7 +133,7 @@ spec: value: Dockerfile - name: image-suffix container: - image: gcr.io/ml-pipeline-staging/image-builder:v20181011-0.0.20-18-g5ab78f1c-a9becd + image: gcr.io/ml-pipeline-staging/image-builder:v20181102-0.0.20-195-g2c8e37ab-e3b0c4 imagePullPolicy: 'Always' args: [ "--commit_sha", "{{inputs.parameters.commit-sha}}", diff --git a/test/imagebuilder/build.sh b/test/imagebuilder/build.sh index c344efc90f5..8fc8bb4033b 100644 --- a/test/imagebuilder/build.sh +++ b/test/imagebuilder/build.sh @@ -61,7 +61,7 @@ ssh-keygen -F github.com || ssh-keyscan github.com >>~/.ssh/known_hosts cp ~/.ssh/github/* ~/.ssh echo "Clone ML pipeline code in COMMIT SHA ${COMMIT_SHA}..." -git clone git@github.com:googleprivate/ml.git ${BASE_DIR} +git clone git@github.com:kubeflow/pipelines.git ${BASE_DIR} cd ${BASE_DIR} git checkout ${COMMIT_SHA} diff --git a/test/presubmit-tests-gce-minikube.sh b/test/presubmit-tests-gce-minikube.sh index b0e02f2b709..bb43f79e96f 100755 --- a/test/presubmit-tests-gce-minikube.sh +++ b/test/presubmit-tests-gce-minikube.sh @@ -44,7 +44,7 @@ function delete_vm { } #Setting the exit handler to delete VM. The VM will be deleted when the script exists (either completes or fails) -#TODO: Find a more resilent way to clean up VMs. 
Right now the VM is not deleted if the machine running this script fails. (See https://github.com/googleprivate/ml/issues/1064) +#TODO: Find a more resilient way to clean up VMs. Right now the VM is not deleted if the machine running this script fails. (See https://github.com/kubeflow/pipelines/issues/1064) trap delete_vm EXIT #Creating the VM @@ -75,7 +75,7 @@ gcloud compute scp --zone=$ZONE "$ssh_key_file" $instance_name:/etc/ssh-knative/ #Copy repo git_root=$(git rev-parse --show-toplevel) git_root_parent=$(dirname "$git_root") -gcloud compute scp --zone=$ZONE --verbosity=error --recurse "$git_root" $instance_name:'~' >/dev/null || true #Do not fail on error here because of broken symlinks until this is fixed: https://github.com/googleprivate/ml/issues/1084 +gcloud compute scp --zone=$ZONE --verbosity=error --recurse "$git_root" $instance_name:'~' >/dev/null || true #Do not fail on error here because of broken symlinks until this is fixed: https://github.com/kubeflow/pipelines/issues/1084 #Installing software on VM gcloud compute ssh --zone=$ZONE $instance_name -- "~/ml/test/minikube/install_docker_minikube_argo.sh" diff --git a/test/presubmit-tests.gke.sh b/test/presubmit-tests.gke.sh index 0ecb80cf397..37f72584b69 100755 --- a/test/presubmit-tests.gke.sh +++ b/test/presubmit-tests.gke.sh @@ -14,7 +14,7 @@ # See the License for the specific language governing permissions and # limitations under the License. -#This test endpoint is not used yet. See https://github.com/googleprivate/ml/issues/1499 +#This test endpoint is not used yet. See https://github.com/kubeflow/pipelines/issues/1499 #Due to the way Prow testing scripts are called, any big change needs to be done in multiple steps/check-ins so that nothing breaks. #Here is the sequence of check-ins: #New entry-point script (this script - presubmit-tests.gke.sh) diff --git a/test/sample-test/run_test.sh b/test/sample-test/run_test.sh index 071203fe794..331ad8ca211 100755 --- a/test/sample-test/run_test.sh +++ b/test/sample-test/run_test.sh @@ -77,7 +77,7 @@ if [ -z "$RESULTS_GCS_DIR" ]; then exit 1 fi -GITHUB_REPO=googleprivate/ml +GITHUB_REPO=kubeflow/pipelines BASE_DIR=/python/src/github.com/${GITHUB_REPO} # Add github to SSH known host.
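The test scripts touched above (test/imagebuilder/build.sh and the run_test.sh variants) share a clone-at-commit pattern against the renamed repo. A condensed sketch of that flow, assuming BASE_DIR and COMMIT_SHA are supplied by the CI environment as in the scripts themselves:

```bash
# Trust github.com, then clone the renamed repo and pin it to the commit
# under test. BASE_DIR and COMMIT_SHA come from the CI environment.
ssh-keygen -F github.com || ssh-keyscan github.com >> ~/.ssh/known_hosts
git clone git@github.com:kubeflow/pipelines.git "${BASE_DIR}"
cd "${BASE_DIR}"
git checkout "${COMMIT_SHA}"
```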
diff --git a/test/sample_test.yaml b/test/sample_test.yaml index 900eb48f639..cca3c7007a0 100644 --- a/test/sample_test.yaml +++ b/test/sample_test.yaml @@ -250,7 +250,7 @@ spec: value: Dockerfile - name: image-suffix container: - image: gcr.io/ml-pipeline-staging/image-builder:v20181011-0.0.20-18-g5ab78f1c-a9becd + image: gcr.io/ml-pipeline-staging/image-builder:v20181102-0.0.20-195-g2c8e37ab-e3b0c4 imagePullPolicy: 'Always' args: [ "--commit_sha", "{{inputs.parameters.commit-sha}}", @@ -282,7 +282,7 @@ spec: - name: build-script - name: image-suffix container: - image: gcr.io/ml-pipeline-staging/image-builder:v20181011-0.0.20-18-g5ab78f1c-a9becd + image: gcr.io/ml-pipeline-staging/image-builder:v20181102-0.0.20-195-g2c8e37ab-e3b0c4 imagePullPolicy: 'Always' args: [ "--commit_sha", "{{inputs.parameters.commit-sha}}", diff --git a/test/unit_test_gke.yaml b/test/unit_test_gke.yaml index 1f0ed581777..65291f74196 100644 --- a/test/unit_test_gke.yaml +++ b/test/unit_test_gke.yaml @@ -45,7 +45,7 @@ spec: - name: commit-sha - name: test-results-gcs-dir container: - image: gcr.io/ml-pipeline-staging/backend-unit-test:v20180914-0.0.15-72-gd59126c4-e3b0c4 + image: gcr.io/ml-pipeline-staging/backend-unit-test:v20181102-0.0.20-196-g4d66038f-e3b0c4 imagePullPolicy: 'Always' args: [ "--results-gcs-dir", "{{inputs.parameters.test-results-gcs-dir}}",
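The image tags bumped in these test YAML files (for example v20181102-0.0.20-195-g2c8e37ab-e3b0c4) appear to encode a build date plus `git describe` output. The actual release tooling is not shown in this diff, but a tag in that style could be generated roughly like this; the trailing suffix (e.g. -e3b0c4) is not explained here and is omitted:

```bash
# Rough sketch of the apparent tag scheme: build date plus `git describe`
# output (latest tag, commits since tag, short SHA). Assumption: the real
# build tooling may differ; the trailing -e3b0c4 suffix is not reproduced.
TAG="v$(date +%Y%m%d)-$(git describe --tags --always)"
echo "gcr.io/ml-pipeline-staging/image-builder:${TAG}"
```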