diff --git a/tests/test_kafka.py b/tests/test_kafka.py
index ad45de01747aa..5889481b253ea 100644
--- a/tests/test_kafka.py
+++ b/tests/test_kafka.py
@@ -107,6 +107,7 @@ def setup_spark(kerberized_kafka, configure_security_spark, configure_universe):
 
 
 @pytest.mark.sanity
+@pytest.mark.smoke
 @pytest.mark.skipif(not utils.kafka_enabled(), reason='KAFKA_ENABLED is false')
 def test_spark_and_kafka():
     kerberos_flag = "true" if KERBERIZED_KAFKA else "false"  # flag for using kerberized kafka given to app
diff --git a/tests/test_spark.py b/tests/test_spark.py
index b7f408dbb68e3..02aa4c4ef6077 100644
--- a/tests/test_spark.py
+++ b/tests/test_spark.py
@@ -46,6 +46,7 @@ def setup_spark(configure_security, configure_universe):
 
 @pytest.mark.xfail(utils.is_strict(), reason="Currently fails in strict mode")
 @pytest.mark.sanity
+@pytest.mark.smoke
 def test_jar(app_name=utils.SPARK_APP_NAME):
     master_url = ("https" if utils.is_strict() else "http") + "://leader.mesos:5050"
     spark_job_runner_args = '{} dcos \\"*\\" spark:only 2 --auth-token={}'.format(
@@ -60,6 +61,7 @@ def test_jar(app_name=utils.SPARK_APP_NAME):
 
 
 @pytest.mark.sanity
+@pytest.mark.smoke
 def test_rpc_auth():
     secret_name = "sparkauth"
 
@@ -94,6 +96,7 @@ def test_sparkPi(app_name=utils.SPARK_APP_NAME):
 
 
 @pytest.mark.sanity
+@pytest.mark.smoke
 def test_python():
     python_script_path = os.path.join(THIS_DIR, 'jobs', 'python', 'pi_with_include.py')
     python_script_url = utils.upload_file(python_script_path)
@@ -106,6 +109,7 @@ def test_python():
 
 
 @pytest.mark.sanity
+@pytest.mark.smoke
 def test_r():
     r_script_path = os.path.join(THIS_DIR, 'jobs', 'R', 'dataframe.R')
     r_script_url = utils.upload_file(r_script_path)
@@ -125,6 +129,7 @@ def test_cni():
 
 #@pytest.mark.skip("Enable when SPARK-21694 is merged and released in DC/OS Spark")
 @pytest.mark.sanity
+@pytest.mark.smoke
 def test_cni_labels():
     driver_task_id = utils.submit_job(app_url=utils.SPARK_EXAMPLES,
                                       app_args="3000",  # Long enough to examine the Driver's & Executor's task infos
@@ -166,6 +171,7 @@ def _check_task_network_info(task):
 
 
 @pytest.mark.sanity
+@pytest.mark.smoke
 def test_s3():
     def make_credential_secret(envvar, secret_path):
         rc, stdout, stderr = sdk_cmd.run_raw_cli("security secrets create {p} -v {e}"
@@ -234,6 +240,7 @@ def make_credential_secret(envvar, secret_path):
 # Skip DC/OS < 1.10, because it doesn't have adminrouter support for service groups.
 @pytest.mark.skipif('shakedown.dcos_version_less_than("1.10")')
 @pytest.mark.sanity
+@pytest.mark.smoke
 def test_marathon_group():
     app_id = utils.FOLDERED_SPARK_APP_NAME
     options = {"service": {"name": app_id}}
@@ -243,6 +250,7 @@ def test_marathon_group():
 
 
     #shakedown.uninstall_package_and_wait(SPARK_PACKAGE_NAME, app_id)
+
 @pytest.mark.sanity
 def test_cli_multiple_spaces():
     utils.run_tests(app_url=utils.SPARK_EXAMPLES,
@@ -256,6 +264,7 @@ def test_cli_multiple_spaces():
 @pytest.mark.skipif('shakedown.dcos_version_less_than("1.10")')
 @sdk_utils.dcos_ee_only
 @pytest.mark.sanity
+@pytest.mark.smoke
 def test_driver_executor_tls():
     '''
     Put keystore and truststore as secrets in DC/OS secret store.