Skip to content
This repository was archived by the owner on Sep 4, 2024. It is now read-only.

Commit

Permalink
Add expected version param in mock call asserts
Browse files Browse the repository at this point in the history
  • Loading branch information
pankajkoti committed Mar 20, 2024
1 parent ed147a6 commit 10af94e
Show file tree
Hide file tree
Showing 2 changed files with 13 additions and 77 deletions.
2 changes: 2 additions & 0 deletions tests/databricks/test_notebook.py
Original file line number Diff line number Diff line change
Expand Up @@ -151,6 +151,7 @@ def test_databricks_notebook_operator_without_taskgroup_new_cluster(
"libraries": [{"nb_index": {"package": "nb_package"}}],
"timeout_seconds": 0,
"email_notifications": {},
"version": "2.1",
}
)
mock_monitor.assert_called_once()
Expand Down Expand Up @@ -197,6 +198,7 @@ def test_databricks_notebook_operator_without_taskgroup_existing_cluster(
"libraries": [{"nb_index": {"package": "nb_package"}}],
"timeout_seconds": 0,
"email_notifications": {},
"version": "2.1",
}
)
mock_monitor.assert_called_once()
Expand Down
88 changes: 11 additions & 77 deletions tests/databricks/test_workflow.py
Original file line number Diff line number Diff line change
Expand Up @@ -47,6 +47,7 @@
},
],
"timeout_seconds": 0,
"version": "2.1",
}


Expand Down Expand Up @@ -289,79 +290,6 @@ def test_create_workflow_with_arbitrary_extra_job_params(
)


@mock.patch("astro_databricks.operators.workflow.DatabricksHook")
@mock.patch("astro_databricks.operators.workflow.ApiClient")
@mock.patch("astro_databricks.operators.workflow.JobsApi")
@mock.patch("astro_databricks.operators.workflow._get_job_by_name")
@mock.patch(
    "astro_databricks.operators.workflow.RunsApi.get_run",
    return_value={"state": {"life_cycle_state": "RUNNING"}},
)
def test_create_workflow_with_arbitrary_extra_job_params(
    mock_run_api, mock_get_jobs, mock_jobs_api, mock_api, mock_hook, dag
):
    """Verify that ``extra_job_params`` passed to the task group are forwarded
    verbatim into the ``reset_job`` payload sent to the Databricks Jobs API.

    Patches the Databricks hook/API layers so no network calls are made, then
    asserts each overridden setting (timeout, webhook/email notifications,
    git_source) appears unchanged in ``new_settings``.
    """
    # Pretend the job already exists so the launch path issues reset_job
    # (update) rather than create_job.
    mock_get_jobs.return_value = {"job_id": 862519602273592}

    extra_job_params = {
        "timeout_seconds": 10,  # default: 0
        "webhook_notifications": {
            "on_failure": [{"id": "b0aea8ab-ea8c-4a45-a2e9-9a26753fd702"}],
        },
        "email_notifications": {
            "no_alert_for_skipped_runs": True,  # default: False
            "on_start": ["user.name@databricks.com"],
        },
        "git_source": {  # no default value
            "git_url": "https://github.com/astronomer/astro-provider-databricks",
            "git_provider": "gitHub",
            "git_branch": "main",
        },
    }
    with dag:
        task_group = DatabricksWorkflowTaskGroup(
            group_id="test_workflow",
            databricks_conn_id="foo",
            job_clusters=[{"job_cluster_key": "foo"}],
            notebook_params={"notebook_path": "/foo/bar"},
            extra_job_params=extra_job_params,
        )
        with task_group:
            notebook_with_extra = DatabricksNotebookOperator(
                task_id="notebook_with_extra",
                databricks_conn_id="foo",
                notebook_path="/foo/bar",
                source="WORKSPACE",
                job_cluster_key="foo",
            )
            notebook_with_extra

    # The task group contains the launch task plus the single notebook task.
    assert len(task_group.children) == 2

    task_group.children["test_workflow.launch"].create_workflow_json()
    task_group.children["test_workflow.launch"].execute(context={})

    mock_jobs_api.return_value.reset_job.assert_called_once()
    kwargs = mock_jobs_api.return_value.reset_job.call_args_list[0].kwargs["json"]

    assert kwargs["job_id"] == 862519602273592
    assert (
        kwargs["new_settings"]["timeout_seconds"] == extra_job_params["timeout_seconds"]
    )
    assert (
        kwargs["new_settings"]["webhook_notifications"]
        == extra_job_params["webhook_notifications"]
    )
    assert (
        kwargs["new_settings"]["email_notifications"]
        == extra_job_params["email_notifications"]
    )
    assert kwargs["new_settings"]["git_source"] == extra_job_params["git_source"]


@mock.patch("astro_databricks.operators.workflow.DatabricksHook")
@mock.patch("astro_databricks.operators.workflow.ApiClient")
@mock.patch("astro_databricks.operators.workflow.JobsApi")
Expand Down Expand Up @@ -399,7 +327,7 @@ def test_create_workflow_with_nested_task_groups(
extra_job_params=extra_job_params,
notebook_packages=[
{"pypi": {"package": "mlflow==2.4.0"}},
]
],
)
with outer_task_group:
direct_notebook = DatabricksNotebookOperator(
Expand Down Expand Up @@ -433,8 +361,14 @@ def test_create_workflow_with_nested_task_groups(
inner_notebook_json = kwargs["new_settings"]["tasks"][0]
outer_notebook_json = kwargs["new_settings"]["tasks"][1]

assert inner_notebook_json["task_key"] == "unit_test_dag__test_workflow__direct_notebook"
assert (
inner_notebook_json["task_key"]
== "unit_test_dag__test_workflow__direct_notebook"
)
assert inner_notebook_json["libraries"] == [{"pypi": {"package": "mlflow==2.4.0"}}]

assert outer_notebook_json["task_key"] == "unit_test_dag__test_workflow__middle_task_group__inner_task_group__inner_notebook"
assert outer_notebook_json["libraries"] == [{"pypi": {"package": "mlflow==2.4.0"}}]
assert (
outer_notebook_json["task_key"]
== "unit_test_dag__test_workflow__middle_task_group__inner_task_group__inner_notebook"
)
assert outer_notebook_json["libraries"] == [{"pypi": {"package": "mlflow==2.4.0"}}]

0 comments on commit 10af94e

Please sign in to comment.