From 06e544ba8cfe86fff85fdd3d26349035669df677 Mon Sep 17 00:00:00 2001 From: Ning Date: Thu, 11 Apr 2019 21:40:45 -0700 Subject: [PATCH] add type checking sample to sample tests (#1129) * add type checking sample to sample tests * add the test script exit code to the sample test result; update the check_notebook_result script to not validate the pipeline runs when experiment arg is not provided * fix typo --- .../notebooks/DSL Static Type Checking.ipynb | 1 + test/sample-test/check_notebook_results.py | 68 +++++++++++-------- test/sample-test/run_test.sh | 22 +++++- test/sample_test_v2.yaml | 15 +++- 4 files changed, 75 insertions(+), 31 deletions(-) diff --git a/samples/notebooks/DSL Static Type Checking.ipynb b/samples/notebooks/DSL Static Type Checking.ipynb index 8aa878f75ee..ed89b79fe10 100644 --- a/samples/notebooks/DSL Static Type Checking.ipynb +++ b/samples/notebooks/DSL Static Type Checking.ipynb @@ -780,6 +780,7 @@ } ], "metadata": { + "celltoolbar": "Tags", "kernelspec": { "display_name": "Python 3", "language": "python", diff --git a/test/sample-test/check_notebook_results.py b/test/sample-test/check_notebook_results.py index 18e5fc76ae1..bf38deccddd 100644 --- a/test/sample-test/check_notebook_results.py +++ b/test/sample-test/check_notebook_results.py @@ -17,7 +17,12 @@ import utils ###### Input/Output Instruction ###### -# input: experiment name, testname, and, namespace +# args: +# experiment: where the test run belongs, only necessary when a job is submitted. +# namespace: where the pipeline system is deployed. +# testname: test name in the json xml +# result: name of the file that stores the test result +# exit_code: the exit code of the bash command that runs the test. 
# Parsing the input arguments def parse_arguments(): @@ -26,20 +31,23 @@ def parse_arguments(): parser = argparse.ArgumentParser() parser.add_argument('--experiment', type=str, - required=True, help='The experiment name') - parser.add_argument('--testname', - type=str, - required=True, - help="Test name") parser.add_argument('--namespace', type=str, default='kubeflow', help="namespace of the deployed pipeline system. Default: kubeflow") + parser.add_argument('--testname', + type=str, + required=True, + help="Test name") parser.add_argument('--result', type=str, required=True, help='The path of the test result that will be exported.') + parser.add_argument('--exit-code', + type=str, + required=True, + help='The exit code of the bash command that runs the test.') args = parser.parse_args() return args @@ -48,33 +56,37 @@ def main(): test_cases = [] test_name = args.testname + ' Sample Test' - ###### Initialization ###### - host = 'ml-pipeline.%s.svc.cluster.local:8888' % args.namespace - client = Client(host=host) + ###### Write the script exit code log ###### + utils.add_junit_test(test_cases, 'test script execution', (args.exit_code == '0'), 'test script failure with exit code: ' + args.exit_code) + + if args.experiment is not None: + ###### Initialization ###### + host = 'ml-pipeline.%s.svc.cluster.local:8888' % args.namespace + client = Client(host=host) - ###### Get experiments ###### - experiment_id = client.get_experiment(experiment_name=args.experiment).id + ###### Get experiments ###### + experiment_id = client.get_experiment(experiment_name=args.experiment).id - ###### Get runs ###### - list_runs_response = client.list_runs(page_size=1000, experiment_id=experiment_id) + ###### Get runs ###### + list_runs_response = client.list_runs(page_size=1000, experiment_id=experiment_id) - ###### Check all runs ###### - for run in list_runs_response.runs: - run_id = run.id - response = client.wait_for_run_completion(run_id, 1200) - succ = 
(response.run.status.lower()=='succeeded') - utils.add_junit_test(test_cases, 'job completion', succ, 'waiting for job completion failure') + ###### Check all runs ###### + for run in list_runs_response.runs: + run_id = run.id + response = client.wait_for_run_completion(run_id, 1200) + succ = (response.run.status.lower()=='succeeded') + utils.add_junit_test(test_cases, 'job completion', succ, 'waiting for job completion failure') - ###### Output Argo Log for Debugging ###### - workflow_json = client._get_workflow_json(run_id) - workflow_id = workflow_json['metadata']['name'] - argo_log, _ = utils.run_bash_command('argo logs -n {} -w {}'.format(args.namespace, workflow_id)) - print("=========Argo Workflow Log=========") - print(argo_log) + ###### Output Argo Log for Debugging ###### + workflow_json = client._get_workflow_json(run_id) + workflow_id = workflow_json['metadata']['name'] + argo_log, _ = utils.run_bash_command('argo logs -n {} -w {}'.format(args.namespace, workflow_id)) + print("=========Argo Workflow Log=========") + print(argo_log) - if not succ: - utils.write_junit_xml(test_name, args.result, test_cases) - exit(1) + if not succ: + utils.write_junit_xml(test_name, args.result, test_cases) + exit(1) ###### Write out the test result in junit xml ###### utils.write_junit_xml(test_name, args.result, test_cases) diff --git a/test/sample-test/run_test.sh b/test/sample-test/run_test.sh index a1e84778cc3..aea7b2f18ce 100755 --- a/test/sample-test/run_test.sh +++ b/test/sample-test/run_test.sh @@ -314,8 +314,9 @@ elif [ "$TEST_NAME" == "notebook-tfx" ]; then jupyter nbconvert --to python notebook-tfx.ipynb pip3 install tensorflow==1.8.0 ipython notebook-tfx.py + EXIT_CODE=$? 
cd "${TEST_DIR}" - python3 check_notebook_results.py --experiment notebook-tfx-test --testname notebooktfx --result $SAMPLE_NOTEBOOK_TFX_TEST_RESULT --namespace ${NAMESPACE} + python3 check_notebook_results.py --experiment notebook-tfx-test --testname notebooktfx --result $SAMPLE_NOTEBOOK_TFX_TEST_RESULT --namespace ${NAMESPACE} --exit-code ${EXIT_CODE} echo "Copy the test results to GCS ${RESULTS_GCS_DIR}/" gsutil cp $SAMPLE_NOTEBOOK_TFX_TEST_RESULT ${RESULTS_GCS_DIR}/$SAMPLE_NOTEBOOK_TFX_TEST_RESULT @@ -334,9 +335,26 @@ elif [ "$TEST_NAME" == "notebook-lightweight" ]; then jupyter nbconvert --to python notebook-lightweight.ipynb pip3 install tensorflow==1.8.0 ipython notebook-lightweight.py + EXIT_CODE=$? cd "${TEST_DIR}" - python3 check_notebook_results.py --experiment notebook-lightweight --testname notebooklightweight --result $SAMPLE_NOTEBOOK_LIGHTWEIGHT_TEST_RESULT --namespace ${NAMESPACE} + python3 check_notebook_results.py --experiment notebook-lightweight --testname notebooklightweight --result $SAMPLE_NOTEBOOK_LIGHTWEIGHT_TEST_RESULT --namespace ${NAMESPACE} --exit-code ${EXIT_CODE} echo "Copy the test results to GCS ${RESULTS_GCS_DIR}/" gsutil cp $SAMPLE_NOTEBOOK_LIGHTWEIGHT_TEST_RESULT ${RESULTS_GCS_DIR}/$SAMPLE_NOTEBOOK_LIGHTWEIGHT_TEST_RESULT +elif [ "$TEST_NAME" == "notebook-typecheck" ]; then + SAMPLE_NOTEBOOK_TYPECHECK_TEST_RESULT=junit_SampleNotebookTypecheckOutput.xml + SAMPLE_NOTEBOOK_TYPECHECK_TEST_OUTPUT=${RESULTS_GCS_DIR} + + cd ${BASE_DIR}/samples/notebooks + export LC_ALL=C.UTF-8 + export LANG=C.UTF-8 + papermill --prepare-only -p KFP_PACKAGE /tmp/kfp.tar.gz DSL\ Static\ Type\ Checking.ipynb notebook-typecheck.ipynb + jupyter nbconvert --to python notebook-typecheck.ipynb + ipython notebook-typecheck.py + EXIT_CODE=$? 
+ cd "${TEST_DIR}" + python3 check_notebook_results.py --testname notebooktypecheck --result $SAMPLE_NOTEBOOK_TYPECHECK_TEST_RESULT --exit-code ${EXIT_CODE} + + echo "Copy the test results to GCS ${RESULTS_GCS_DIR}/" + gsutil cp $SAMPLE_NOTEBOOK_TYPECHECK_TEST_RESULT ${RESULTS_GCS_DIR}/$SAMPLE_NOTEBOOK_TYPECHECK_TEST_RESULT fi diff --git a/test/sample_test_v2.yaml b/test/sample_test_v2.yaml index 2e9aac399c0..8c7e3b90780 100644 --- a/test/sample_test_v2.yaml +++ b/test/sample_test_v2.yaml @@ -120,7 +120,20 @@ spec: value: "{{inputs.parameters.namespace}}" - name: test-name value: "notebook-lightweight" - + - name: run-notebook-typecheck-tests + template: run-sample-tests + arguments: + parameters: + - name: test-results-gcs-dir + value: "{{inputs.parameters.test-results-gcs-dir}}" + - name: target-image-prefix + value: "{{inputs.parameters.target-image-prefix}}" + - name: sample-tests-image + value: "{{inputs.parameters.target-image-prefix}}{{inputs.parameters.sample-tests-image-suffix}}" + - name: namespace + value: "{{inputs.parameters.namespace}}" + - name: test-name + value: "notebook-typecheck" # Build and push image - name: build-image-by-dockerfile