
Commit

Reference JOBS_API_VERSION instead of hardcoding value in asserts
pankajkoti committed Mar 20, 2024
1 parent 2c9ed16 commit 769744c
Showing 2 changed files with 10 additions and 8 deletions.
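
The constant referenced throughout these asserts is imported from astro_databricks.constants. As a minimal sketch (the module's exact contents are not shown in this commit, so this is an assumption), it presumably just pins the Databricks Jobs API version that the tests previously hardcoded:

    # astro_databricks/constants.py (sketch; assumes the constant pins the
    # Jobs API version previously hardcoded as "2.1" in the asserts below)
    JOBS_API_VERSION = "2.1"

Referencing the constant means a future Jobs API version bump only has to change one place, keeping the tests in sync with the operators.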
13 changes: 7 additions & 6 deletions tests/databricks/test_notebook.py
@@ -3,6 +3,7 @@

import pytest
from airflow.exceptions import AirflowException
+ from astro_databricks.constants import JOBS_API_VERSION
from astro_databricks.operators.notebook import DatabricksNotebookOperator
from astro_databricks.operators.workflow import (
DatabricksWorkflowTaskGroup,
@@ -152,7 +153,7 @@ def test_databricks_notebook_operator_without_taskgroup_new_cluster(
"timeout_seconds": 0,
"email_notifications": {},
},
- version="2.1",
+ version=JOBS_API_VERSION,
)
mock_monitor.assert_called_once()

@@ -199,7 +200,7 @@ def test_databricks_notebook_operator_without_taskgroup_existing_cluster(
"timeout_seconds": 0,
"email_notifications": {},
},
- version="2.1",
+ version=JOBS_API_VERSION,
)
mock_monitor.assert_called_once()

Expand Down Expand Up @@ -296,7 +297,7 @@ def test_wait_for_pending_task(mock_sleep, mock_runs_api, databricks_notebook_op
{"state": {"life_cycle_state": "RUNNING"}},
]
databricks_notebook_operator._wait_for_pending_task(current_task, mock_runs_api)
mock_runs_api.get_run.assert_called_with("123", version="2.1")
mock_runs_api.get_run.assert_called_with("123", version=JOBS_API_VERSION)
assert mock_runs_api.get_run.call_count == 2
mock_runs_api.reset_mock()

@@ -313,7 +314,7 @@ def test_wait_for_terminating_task(
{"state": {"life_cycle_state": "TERMINATED"}},
]
databricks_notebook_operator._wait_for_terminating_task(current_task, mock_runs_api)
- mock_runs_api.get_run.assert_called_with("123", version="2.1")
+ mock_runs_api.get_run.assert_called_with("123", version=JOBS_API_VERSION)
assert mock_runs_api.get_run.call_count == 3
mock_runs_api.reset_mock()

@@ -328,7 +329,7 @@ def test_wait_for_running_task(mock_sleep, mock_runs_api, databricks_notebook_op
{"state": {"life_cycle_state": "TERMINATED"}},
]
databricks_notebook_operator._wait_for_running_task(current_task, mock_runs_api)
- mock_runs_api.get_run.assert_called_with("123", version="2.1")
+ mock_runs_api.get_run.assert_called_with("123", version=JOBS_API_VERSION)
assert mock_runs_api.get_run.call_count == 3
mock_runs_api.reset_mock()

@@ -382,7 +383,7 @@ def test_monitor_databricks_job_success(
databricks_notebook_operator.databricks_run_id = "1"
databricks_notebook_operator.monitor_databricks_job()
mock_runs_api.return_value.get_run.assert_called_with(
- databricks_notebook_operator.databricks_run_id, version="2.1"
+ databricks_notebook_operator.databricks_run_id, version=JOBS_API_VERSION
)
assert (
"Check the job run in Databricks: https://databricks-instance-xyz.cloud.databricks.com/#job/1234/run/1"
5 changes: 3 additions & 2 deletions tests/databricks/test_workflow.py
@@ -6,6 +6,7 @@
import pytest
from airflow.exceptions import AirflowException
from airflow.utils.task_group import TaskGroup
+ from astro_databricks.constants import JOBS_API_VERSION
from astro_databricks.operators.notebook import DatabricksNotebookOperator
from astro_databricks.operators.workflow import DatabricksWorkflowTaskGroup

@@ -142,15 +143,15 @@ def test_create_workflow_from_notebooks_with_create(
task_group.children["test_workflow.launch"].execute(context={})
mock_jobs_api.return_value.create_job.assert_called_once_with(
json=expected_workflow_json,
- version="2.1",
+ version=JOBS_API_VERSION,
)
mock_jobs_api.return_value.run_now.assert_called_once_with(
job_id=1,
jar_params=[],
notebook_params={"notebook_path": "/foo/bar"},
python_params=[],
spark_submit_params=[],
- version="2.1",
+ version=JOBS_API_VERSION,
)


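For reference, the call pattern these tests now assert, as a sketch inferred from the assertions above (jobs_api and runs_api stand in for the Jobs/Runs API clients mocked as mock_jobs_api and mock_runs_api; the operator internals are not part of this commit):

    # Inferred from the assertions: every Jobs/Runs API call passes the shared
    # constant instead of a literal "2.1".
    jobs_api.create_job(json=workflow_json, version=JOBS_API_VERSION)
    runs_api.get_run(run_id, version=JOBS_API_VERSION)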
