Add basic sample tests (#79)
* add sequential sample test

* add condition basic sample

* reuse script

* add all the other basic tests

* update sample test dockerfile to add run_basic_test file

* write test output

* typo bug
gaoning777 authored and k8s-ci-robot committed Nov 7, 2018
1 parent 13c6515 commit 8427e30
Showing 4 changed files with 369 additions and 1 deletion.
1 change: 1 addition & 0 deletions test/sample-test/Dockerfile
@@ -17,6 +17,7 @@ RUN pip3 install minio
COPY ./run_test.sh /run_test.sh
COPY ./run_kubeflow_test.py /run_kubeflow_test.py
COPY ./run_tfx_test.py /run_tfx_test.py
COPY ./run_basic_test.py /run_basic_test.py
COPY ./run_xgboost_test.py /run_xgboost_test.py
COPY ./utils.py /utils.py
RUN chmod +x /run_test.sh
105 changes: 105 additions & 0 deletions test/sample-test/run_basic_test.py
@@ -0,0 +1,105 @@
# Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.


import argparse
import os
from datetime import datetime
from kfp import Client
import utils

###### Input/Output Instruction ######
# input: yaml
# output: local file path


# Parsing the input arguments
def parse_arguments():
  """Parse command line arguments."""

  parser = argparse.ArgumentParser()
  parser.add_argument('--input',
                      type=str,
                      required=True,
                      help='The path of a pipeline package that will be submitted.')
  parser.add_argument('--result',
                      type=str,
                      required=True,
                      help='The path of the test result that will be exported.')
  parser.add_argument('--output',
                      type=str,
                      required=True,
                      help='The path of the test output')
  parser.add_argument('--testname',
                      type=str,
                      required=True,
                      help="Test name")
  args = parser.parse_args()
  return args

def main():
  args = parse_arguments()
  test_cases = []
  test_name = args.testname + ' Sample Test'

  ###### Initialization ######
  client = Client()

  ###### Check Input File ######
  utils.add_junit_test(test_cases, 'input generated yaml file', os.path.exists(args.input), 'yaml file is not generated')
  if not os.path.exists(args.input):
    utils.write_junit_xml(test_name, args.result, test_cases)
    exit()

  ###### Create Experiment ######
  experiment_name = args.testname + ' sample experiment'
  response = client.create_experiment(experiment_name)
  experiment_id = response.id
  utils.add_junit_test(test_cases, 'create experiment', True)

  ###### Create Job ######
  job_name = args.testname + '_sample'
  params = {}
  response = client.run_pipeline(experiment_id, job_name, args.input, params)
  run_id = response.id
  utils.add_junit_test(test_cases, 'create pipeline run', True)

  ###### Monitor Job ######
  start_time = datetime.now()
  response = client.wait_for_run_completion(run_id, 1200)
  succ = (response.run.status.lower() == 'succeeded')
  end_time = datetime.now()
  elapsed_time = (end_time - start_time).seconds
  utils.add_junit_test(test_cases, 'job completion', succ, 'waiting for job completion failure', elapsed_time)
  if not succ:
    utils.write_junit_xml(test_name, args.result, test_cases)
    exit()

  ###### Output Argo Log for Debugging ######
  workflow_json = client._get_workflow_json(run_id)
  workflow_id = workflow_json['metadata']['name']
  # TODO: remove the namespace dependency or make it configurable.
  argo_log, _ = utils.run_bash_command('argo logs -n kubeflow -w {}'.format(workflow_id))
  print("=========Argo Workflow Log=========")
  print(argo_log)

  ###### Delete Job ######
  # TODO: add deletion when the backend API offers the interface.

  ###### Write out the test result in junit xml ######
  utils.write_junit_xml(test_name, args.result, test_cases)

if __name__ == "__main__":
  main()
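
For reference, the new script is driven entirely by the four required flags defined above. A minimal standalone invocation might look like the sketch below; the package path and output directory are illustrative, and it assumes a compiled sample package is already available and that kfp.Client() can reach the Pipelines API server from wherever it runs (in CI, run_test.sh handles both of these, as shown in the next diff):

# Hypothetical local run of the basic sample test; paths are illustrative only.
python3 run_basic_test.py \
  --input /tmp/sequential.tar.gz \
  --result junit_SampleSequentialOutput.xml \
  --output /tmp/sample-test-output \
  --testname sequential
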
64 changes: 64 additions & 0 deletions test/sample-test/run_test.sh
@@ -185,7 +185,71 @@ elif [ "$TEST_NAME" == "tfx" ]; then
  python3 run_tfx_test.py --input ${BASE_DIR}/samples/tfx/taxi-cab-classification-pipeline.tar.gz --result $SAMPLE_TFX_TEST_RESULT --output $SAMPLE_TFX_TEST_OUTPUT
  echo "Copy the test results to GCS ${RESULTS_GCS_DIR}/"
  gsutil cp ${SAMPLE_TFX_TEST_RESULT} ${RESULTS_GCS_DIR}/${SAMPLE_TFX_TEST_RESULT}
elif [ "$TEST_NAME" == "sequential" ]; then
SAMPLE_SEQUENTIAL_TEST_RESULT=junit_SampleSequentialOutput.xml
SAMPLE_SEQUENTIAL_TEST_OUTPUT=${RESULTS_GCS_DIR}

# Compile samples
cd ${BASE_DIR}/samples/basic
dsl-compile --py sequential.py --output sequential.tar.gz

cd /
python3 run_basic_test.py --input ${BASE_DIR}/samples/basic/sequential.tar.gz --result SAMPLE_SEQUENTIAL_TEST_RESULT --output SAMPLE_SEQUENTIAL_TEST_OUTPUT --testname sequential

echo "Copy the test results to GCS ${RESULTS_GCS_DIR}/"
gsutil cp ${SAMPLE_SEQUENTIAL_TEST_RESULT} ${RESULTS_GCS_DIR}/${SAMPLE_SEQUENTIAL_TEST_RESULT}
elif [ "$TEST_NAME" == "condition" ]; then
SAMPLE_CONDITION_TEST_RESULT=junit_SampleConditionOutput.xml
SAMPLE_CONDITION_TEST_OUTPUT=${RESULTS_GCS_DIR}

# Compile samples
cd ${BASE_DIR}/samples/basic
dsl-compile --py condition.py --output condition.tar.gz

cd /
python3 run_basic_test.py --input ${BASE_DIR}/samples/basic/condition.tar.gz --result SAMPLE_CONDITION_TEST_RESULT --output SAMPLE_CONDITION_TEST_OUTPUT --testname conditio

echo "Copy the test results to GCS ${RESULTS_GCS_DIR}/"
gsutil cp ${SAMPLE_CONDITION_TEST_RESULT} ${RESULTS_GCS_DIR}/${SAMPLE_CONDITION_TEST_RESULT}
elif [ "$TEST_NAME" == "exithandler" ]; then
SAMPLE_EXIT_HANDLER_TEST_RESULT=junit_SampleExitHandlerOutput.xml
SAMPLE_EXIT_HANDLER_TEST_OUTPUT=${RESULTS_GCS_DIR}

# Compile samples
cd ${BASE_DIR}/samples/basic
dsl-compile --py exit_handler.py --output exit_handler.tar.gz

cd /
python3 run_basic_test.py --input ${BASE_DIR}/samples/basic/exit_handler.tar.gz --result SAMPLE_EXIT_HANDLER_TEST_RESULT --output SAMPLE_EXIT_HANDLER_TEST_OUTPUT --testname exithandler

echo "Copy the test results to GCS ${RESULTS_GCS_DIR}/"
gsutil cp ${SAMPLE_EXIT_HANDLER_TEST_RESULT} ${RESULTS_GCS_DIR}/${SAMPLE_EXIT_HANDLER_TEST_RESULT}
elif [ "$TEST_NAME" == "immediatevalue" ]; then
SAMPLE_IMMEDIATE_VALUE_TEST_RESULT=junit_SampleImmediateValueOutput.xml
SAMPLE_IMMEDIATE_VALUE_TEST_OUTPUT=${RESULTS_GCS_DIR}

# Compile samples
cd ${BASE_DIR}/samples/basic
dsl-compile --py immediate_value.py --output immediate_value.tar.gz

cd /
python3 run_basic_test.py --input ${BASE_DIR}/samples/basic/immediate_value.tar.gz --result SAMPLE_IMMEDIATE_VALUE_TEST_RESULT --output SAMPLE_IMMEDIATE_VALUE_TEST_OUTPUT --testname immediatevalue

echo "Copy the test results to GCS ${RESULTS_GCS_DIR}/"
gsutil cp ${SAMPLE_IMMEDIATE_VALUE_TEST_RESULT} ${RESULTS_GCS_DIR}/${SAMPLE_IMMEDIATE_VALUE_TEST_RESULT}
elif [ "$TEST_NAME" == "paralleljoin" ]; then
SAMPLE_PARALLEL_JOIN_TEST_RESULT=junit_SampleParallelJoinOutput.xml
SAMPLE_PARALLEL_JOIN_TEST_OUTPUT=${RESULTS_GCS_DIR}

# Compile samples
cd ${BASE_DIR}/samples/basic
dsl-compile --py parallel_join.py --output parallel_join.tar.gz

cd /
python3 run_basic_test.py --input ${BASE_DIR}/samples/basic/parallel_join.tar.gz --result SAMPLE_PARALLEL_JOIN_TEST_RESULT --output SAMPLE_PARALLEL_JOIN_TEST_OUTPUT --testname paralleljoin

echo "Copy the test results to GCS ${RESULTS_GCS_DIR}/"
gsutil cp ${SAMPLE_PARALLEL_JOIN_TEST_RESULT} ${RESULTS_GCS_DIR}/${SAMPLE_PARALLEL_JOIN_TEST_RESULT}
elif [ "$TEST_NAME" == "xgboost" ]; then
SAMPLE_XGBOOST_TEST_RESULT=junit_SampleXGBoostOutput.xml
SAMPLE_XGBOOST_TEST_OUTPUT=${RESULTS_GCS_DIR}