diff --git a/superset/common/db_query_status.py b/superset/common/db_query_status.py new file mode 100644 index 0000000000000..82bb437f657f1 --- /dev/null +++ b/superset/common/db_query_status.py @@ -0,0 +1,30 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+from enum import Enum + + +class QueryStatus(str, Enum): + """Enum-type class for query statuses""" + + STOPPED: str = "stopped" + FAILED: str = "failed" + PENDING: str = "pending" + RUNNING: str = "running" + SCHEDULED: str = "scheduled" + SUCCESS: str = "success" + FETCHING: str = "fetching" + TIMED_OUT: str = "timed_out" diff --git a/superset/common/query_actions.py b/superset/common/query_actions.py index d0058bd68d406..86a687f08716a 100644 --- a/superset/common/query_actions.py +++ b/superset/common/query_actions.py @@ -20,6 +20,7 @@ from flask_babel import _ from superset import app +from superset.common.db_query_status import QueryStatus from superset.connectors.base.models import BaseDatasource from superset.exceptions import QueryObjectValidationError from superset.utils.core import ( @@ -28,7 +29,6 @@ extract_dataframe_dtypes, ExtraFiltersReasonType, get_time_filter_status, - QueryStatus, ) if TYPE_CHECKING: diff --git a/superset/common/query_context.py b/superset/common/query_context.py index 17ba7c4823868..9f1f4bfdf06e8 100644 --- a/superset/common/query_context.py +++ b/superset/common/query_context.py @@ -29,6 +29,7 @@ from superset import app, db, is_feature_enabled from superset.annotation_layers.dao import AnnotationLayerDAO from superset.charts.dao import ChartDAO +from superset.common.db_query_status import QueryStatus from superset.common.query_actions import get_query_results from superset.common.query_object import QueryObject from superset.common.utils import QueryCacheManager @@ -49,7 +50,6 @@ get_column_names_from_metrics, get_metric_names, normalize_dttm_col, - QueryStatus, TIME_COMPARISION, ) from superset.utils.date_parser import get_past_or_future, normalize_time_delta diff --git a/superset/common/utils.py b/superset/common/utils.py index ab83b84922c1f..77a6baba7fba6 100644 --- a/superset/common/utils.py +++ b/superset/common/utils.py @@ -21,13 +21,14 @@ from pandas import DataFrame from superset import app +from 
superset.common.db_query_status import QueryStatus from superset.constants import CacheRegion from superset.exceptions import CacheLoadError from superset.extensions import cache_manager from superset.models.helpers import QueryResult from superset.stats_logger import BaseStatsLogger from superset.utils.cache import set_and_log_cache -from superset.utils.core import error_msg_from_exception, get_stacktrace, QueryStatus +from superset.utils.core import error_msg_from_exception, get_stacktrace config = app.config stats_logger: BaseStatsLogger = config["STATS_LOGGER"] diff --git a/superset/connectors/sqla/models.py b/superset/connectors/sqla/models.py index c87b4c2145804..7cebb8bac7c16 100644 --- a/superset/connectors/sqla/models.py +++ b/superset/connectors/sqla/models.py @@ -70,6 +70,7 @@ from sqlalchemy.sql.selectable import Alias, TableClause from superset import app, db, is_feature_enabled, security_manager +from superset.common.db_query_status import QueryStatus from superset.connectors.base.models import BaseColumn, BaseDatasource, BaseMetric from superset.connectors.sqla.utils import ( get_physical_table_metadata, @@ -151,12 +152,12 @@ def query(self, query_obj: QueryObjectDict) -> QueryResult: qry = qry.filter(Annotation.start_dttm >= query_obj["from_dttm"]) if query_obj["to_dttm"]: qry = qry.filter(Annotation.end_dttm <= query_obj["to_dttm"]) - status = utils.QueryStatus.SUCCESS + status = QueryStatus.SUCCESS try: df = pd.read_sql_query(qry.statement, db.engine) except Exception as ex: # pylint: disable=broad-except df = pd.DataFrame() - status = utils.QueryStatus.FAILED + status = QueryStatus.FAILED logger.exception(ex) error_message = utils.error_msg_from_exception(ex) return QueryResult( @@ -1444,7 +1445,7 @@ def query(self, query_obj: QueryObjectDict) -> QueryResult: qry_start_dttm = datetime.now() query_str_ext = self.get_query_str_extended(query_obj) sql = query_str_ext.sql - status = utils.QueryStatus.SUCCESS + status = QueryStatus.SUCCESS errors = 
None error_message = None @@ -1477,7 +1478,7 @@ def assign_column_label(df: pd.DataFrame) -> Optional[pd.DataFrame]: df = self.database.get_df(sql, self.schema, mutator=assign_column_label) except Exception as ex: # pylint: disable=broad-except df = pd.DataFrame() - status = utils.QueryStatus.FAILED + status = QueryStatus.FAILED logger.warning( "Query %s on schema %s failed", sql, self.schema, exc_info=True ) diff --git a/superset/db_engine_specs/base.py b/superset/db_engine_specs/base.py index 5b3790ac18e32..74b451ec2da62 100644 --- a/superset/db_engine_specs/base.py +++ b/superset/db_engine_specs/base.py @@ -82,8 +82,6 @@ class TimeGrain(NamedTuple): duration: Optional[str] -QueryStatus = utils.QueryStatus - builtin_time_grains: Dict[Optional[str], str] = { None: __("Original value"), "PT1S": __("Second"), diff --git a/superset/db_engine_specs/druid.py b/superset/db_engine_specs/druid.py index 0230a8c178588..58545d4f8d73e 100644 --- a/superset/db_engine_specs/druid.py +++ b/superset/db_engine_specs/druid.py @@ -31,7 +31,7 @@ logger = logging.getLogger() -class DruidEngineSpec(BaseEngineSpec): # pylint: disable=abstract-method +class DruidEngineSpec(BaseEngineSpec): """Engine spec for Druid.io""" engine = "druid" diff --git a/superset/db_engine_specs/hive.py b/superset/db_engine_specs/hive.py index e33012e712819..b6c3bffdc8084 100644 --- a/superset/db_engine_specs/hive.py +++ b/superset/db_engine_specs/hive.py @@ -35,6 +35,7 @@ from sqlalchemy.orm import Session from sqlalchemy.sql.expression import ColumnClause, Select +from superset.common.db_query_status import QueryStatus from superset.db_engine_specs.base import BaseEngineSpec from superset.db_engine_specs.presto import PrestoEngineSpec from superset.exceptions import SupersetException @@ -48,7 +49,6 @@ from superset.models.core import Database -QueryStatus = utils.QueryStatus logger = logging.getLogger(__name__) diff --git a/superset/db_engine_specs/presto.py b/superset/db_engine_specs/presto.py index 
783dde753a955..9be73045e4ed7 100644 --- a/superset/db_engine_specs/presto.py +++ b/superset/db_engine_specs/presto.py @@ -52,6 +52,7 @@ from sqlalchemy.types import TypeEngine from superset import cache_manager, is_feature_enabled +from superset.common.db_query_status import QueryStatus from superset.db_engine_specs.base import BaseEngineSpec from superset.errors import SupersetErrorType from superset.exceptions import SupersetTemplateException @@ -95,7 +96,6 @@ ) -QueryStatus = utils.QueryStatus logger = logging.getLogger(__name__) diff --git a/superset/models/helpers.py b/superset/models/helpers.py index 580c906099eb4..30d5ab9696475 100644 --- a/superset/models/helpers.py +++ b/superset/models/helpers.py @@ -38,7 +38,7 @@ from sqlalchemy.orm.exc import MultipleResultsFound from sqlalchemy_utils import UUIDType -from superset.utils.core import QueryStatus +from superset.common.db_query_status import QueryStatus logger = logging.getLogger(__name__) diff --git a/superset/models/sql_lab.py b/superset/models/sql_lab.py index 17c72db2051dc..ef0f34e4ffbd5 100644 --- a/superset/models/sql_lab.py +++ b/superset/models/sql_lab.py @@ -15,7 +15,6 @@ # specific language governing permissions and limitations # under the License. 
"""A collection of ORM sqlalchemy models for SQL Lab""" -import enum import re from datetime import datetime from typing import Any, Dict, List @@ -48,17 +47,10 @@ ) from superset.models.tags import QueryUpdater from superset.sql_parse import CtasMethod, ParsedQuery, Table +from superset.sqllab.limiting_factor import LimitingFactor from superset.utils.core import QueryStatus, user_label -class LimitingFactor(str, enum.Enum): - QUERY = "QUERY" - DROPDOWN = "DROPDOWN" - QUERY_AND_DROPDOWN = "QUERY_AND_DROPDOWN" - NOT_LIMITED = "NOT_LIMITED" - UNKNOWN = "UNKNOWN" - - class Query(Model, ExtraJSONMixin): """ORM model for SQL query diff --git a/superset/sql_lab.py b/superset/sql_lab.py index ec0b5d04a5616..f5c9e427f67d9 100644 --- a/superset/sql_lab.py +++ b/superset/sql_lab.py @@ -32,21 +32,18 @@ from sqlalchemy.orm import Session from superset import app, results_backend, results_backend_use_msgpack, security_manager +from superset.common.db_query_status import QueryStatus from superset.dataframe import df_to_records from superset.db_engine_specs import BaseEngineSpec from superset.errors import ErrorLevel, SupersetError, SupersetErrorType from superset.exceptions import SupersetErrorException, SupersetErrorsException from superset.extensions import celery_app -from superset.models.sql_lab import LimitingFactor, Query +from superset.models.sql_lab import Query from superset.result_set import SupersetResultSet from superset.sql_parse import CtasMethod, ParsedQuery +from superset.sqllab.limiting_factor import LimitingFactor from superset.utils.celery import session_scope -from superset.utils.core import ( - json_iso_dttm_ser, - QuerySource, - QueryStatus, - zlib_compress, -) +from superset.utils.core import json_iso_dttm_ser, QuerySource, zlib_compress from superset.utils.dates import now_as_float from superset.utils.decorators import stats_timing diff --git a/superset/sqllab/command.py b/superset/sqllab/command.py index c984de8e2da70..ea4fb45a67a5a 100644 --- 
a/superset/sqllab/command.py +++ b/superset/sqllab/command.py @@ -31,6 +31,7 @@ from superset import app, db, is_feature_enabled, sql_lab from superset.commands.base import BaseCommand +from superset.common.db_query_status import QueryStatus from superset.errors import ErrorLevel, SupersetError, SupersetErrorType from superset.exceptions import ( SupersetErrorException, @@ -43,16 +44,16 @@ ) from superset.jinja_context import BaseTemplateProcessor, get_template_processor from superset.models.core import Database -from superset.models.sql_lab import LimitingFactor, Query +from superset.models.sql_lab import Query from superset.queries.dao import QueryDAO from superset.sqllab.command_status import SqlJsonExecutionStatus +from superset.sqllab.limiting_factor import LimitingFactor +from superset.sqllab.utils import apply_display_max_row_configuration_if_require from superset.utils import core as utils from superset.utils.dates import now_as_float from superset.utils.sqllab_execution_context import SqlJsonExecutionContext -from superset.views.utils import apply_display_max_row_limit config = app.config -QueryStatus = utils.QueryStatus logger = logging.getLogger(__name__) PARAMETER_MISSING_ERR = ( @@ -397,7 +398,9 @@ def _to_payload_results_based( # pylint: disable=no-self-use ) -> str: display_max_row = config["DISPLAY_MAX_ROW"] return json.dumps( - apply_display_max_row_limit(execution_result, display_max_row), + apply_display_max_row_configuration_if_require( + execution_result, display_max_row + ), default=utils.pessimistic_json_iso_dttm_ser, ignore_nan=True, encoding=None, diff --git a/superset/sqllab/limiting_factor.py b/superset/sqllab/limiting_factor.py new file mode 100644 index 0000000000000..46cbc9bd81c4e --- /dev/null +++ b/superset/sqllab/limiting_factor.py @@ -0,0 +1,25 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. 
See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +import enum + + +class LimitingFactor(str, enum.Enum): + QUERY = "QUERY" + DROPDOWN = "DROPDOWN" + QUERY_AND_DROPDOWN = "QUERY_AND_DROPDOWN" + NOT_LIMITED = "NOT_LIMITED" + UNKNOWN = "UNKNOWN" diff --git a/superset/sqllab/utils.py b/superset/sqllab/utils.py new file mode 100644 index 0000000000000..8181b5bd29b2a --- /dev/null +++ b/superset/sqllab/utils.py @@ -0,0 +1,47 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+from typing import Any, Dict + +from superset.common.db_query_status import QueryStatus + + +def apply_display_max_row_configuration_if_require( # pylint: disable=invalid-name + sql_results: Dict[str, Any], max_rows_in_result: int +) -> Dict[str, Any]: + """ + Given a `sql_results` nested structure, applies a limit to the number of rows + + `sql_results` here is the nested structure coming out of sql_lab.get_sql_results, it + contains metadata about the query, as well as the data set returned by the query. + This method limits the number of rows and adds a `displayLimitReached: True` flag to the + metadata. + + :param max_rows_in_result: maximum number of rows to keep in the result set + :param sql_results: The results of a sql query from sql_lab.get_sql_results + :returns: The mutated sql_results structure + """ + + def is_require_to_apply() -> bool: + return ( + sql_results["status"] == QueryStatus.SUCCESS + and sql_results["query"]["rows"] > max_rows_in_result + ) + + if is_require_to_apply(): + sql_results["data"] = sql_results["data"][:max_rows_in_result] + sql_results["displayLimitReached"] = True + return sql_results diff --git a/superset/utils/sqllab_execution_context.py b/superset/utils/sqllab_execution_context.py index 09ae33d54da51..c8cc344715ad2 100644 --- a/superset/utils/sqllab_execution_context.py +++ b/superset/utils/sqllab_execution_context.py @@ -34,7 +34,6 @@ if TYPE_CHECKING: from superset.connectors.sqla.models import Database -QueryStatus = utils.QueryStatus logger = logging.getLogger(__name__) SqlResults = Dict[str, Any] diff --git a/superset/views/core.py b/superset/views/core.py index ba5495da8c161..7bdd77bd80c5f 100755 --- a/superset/views/core.py +++ b/superset/views/core.py @@ -60,6 +60,7 @@ viz, ) from superset.charts.dao import ChartDAO +from superset.common.db_query_status import QueryStatus from superset.connectors.base.models import BaseDatasource from superset.connectors.connector_registry import ConnectorRegistry from superset.connectors.sqla.models import ( @@ -92,13 +93,15 @@ 
from superset.models.dashboard import Dashboard from superset.models.datasource_access_request import DatasourceAccessRequest from superset.models.slice import Slice -from superset.models.sql_lab import LimitingFactor, Query, TabState +from superset.models.sql_lab import Query, TabState from superset.models.user_attributes import UserAttribute from superset.security.analytics_db_safety import check_sqlalchemy_uri from superset.sql_parse import ParsedQuery, Table from superset.sql_validators import get_validator_by_name from superset.sqllab.command import CommandResult, ExecuteSqlCommand from superset.sqllab.command_status import SqlJsonExecutionStatus +from superset.sqllab.limiting_factor import LimitingFactor +from superset.sqllab.utils import apply_display_max_row_configuration_if_require from superset.tasks.async_queries import load_explore_json_into_cache from superset.typing import FlaskResponse from superset.utils import core as utils, csv @@ -127,7 +130,6 @@ ) from superset.views.utils import ( _deserialize_results_payload, - apply_display_max_row_limit, bootstrap_user_data, check_datasource_perms, check_explore_cache_perms, @@ -145,7 +147,6 @@ SQLLAB_QUERY_COST_ESTIMATE_TIMEOUT = config["SQLLAB_QUERY_COST_ESTIMATE_TIMEOUT"] stats_logger = config["STATS_LOGGER"] DAR = DatasourceAccessRequest -QueryStatus = utils.QueryStatus logger = logging.getLogger(__name__) DATABASE_KEYS = [ @@ -2314,7 +2315,7 @@ def results_exec(key: str) -> FlaskResponse: status=400, ) from ex - obj = apply_display_max_row_limit(obj, rows) + obj = apply_display_max_row_configuration_if_require(obj, rows) return json_success( json.dumps( diff --git a/superset/views/utils.py b/superset/views/utils.py index 37d83619fe115..035f332aad3b6 100644 --- a/superset/views/utils.py +++ b/superset/views/utils.py @@ -32,6 +32,7 @@ import superset.models.core as models from superset import app, dataframe, db, result_set, viz +from superset.common.db_query_status import QueryStatus from 
superset.connectors.connector_registry import ConnectorRegistry from superset.errors import ErrorLevel, SupersetError, SupersetErrorType from superset.exceptions import ( @@ -47,7 +48,7 @@ from superset.models.slice import Slice from superset.models.sql_lab import Query from superset.typing import FormData -from superset.utils.core import QueryStatus, TimeRangeEndpoint +from superset.utils.core import TimeRangeEndpoint from superset.utils.decorators import stats_timing from superset.viz import BaseViz diff --git a/superset/viz.py b/superset/viz.py index 3ab2e2b564fd0..5e22114765108 100644 --- a/superset/viz.py +++ b/superset/viz.py @@ -54,6 +54,7 @@ from pandas.tseries.frequencies import to_offset from superset import app, is_feature_enabled +from superset.common.db_query_status import QueryStatus from superset.constants import NULL_STRING from superset.errors import ErrorLevel, SupersetError, SupersetErrorType from superset.exceptions import ( @@ -443,14 +444,14 @@ def get_payload(self, query_obj: Optional[QueryObjectDict] = None) -> VizPayload except SupersetSecurityException as ex: error = dataclasses.asdict(ex.error) self.errors.append(error) - self.status = utils.QueryStatus.FAILED + self.status = QueryStatus.FAILED payload = self.get_df_payload(query_obj) # if payload does not have a df, we are raising an error here. 
df = cast(Optional[pd.DataFrame], payload["df"]) - if self.status != utils.QueryStatus.FAILED: + if self.status != QueryStatus.FAILED: payload["data"] = self.get_data(df) if "df" in payload: del payload["df"] @@ -503,7 +504,7 @@ def get_df_payload( try: df = cache_value["df"] self.query = cache_value["query"] - self.status = utils.QueryStatus.SUCCESS + self.status = QueryStatus.SUCCESS is_loaded = True stats_logger.incr("loaded_from_cache") except Exception as ex: # pylint: disable=broad-except @@ -540,7 +541,7 @@ def get_df_payload( ) ) df = self.get_df(query_obj) - if self.status != utils.QueryStatus.FAILED: + if self.status != QueryStatus.FAILED: stats_logger.incr("loaded_from_source") if not self.force: stats_logger.incr("loaded_from_source_without_force") @@ -554,7 +555,7 @@ def get_df_payload( ) ) self.errors.append(error) - self.status = utils.QueryStatus.FAILED + self.status = QueryStatus.FAILED except Exception as ex: # pylint: disable=broad-except logger.exception(ex) @@ -566,10 +567,10 @@ def get_df_payload( ) ) self.errors.append(error) - self.status = utils.QueryStatus.FAILED + self.status = QueryStatus.FAILED stacktrace = utils.get_stacktrace() - if is_loaded and cache_key and self.status != utils.QueryStatus.FAILED: + if is_loaded and cache_key and self.status != QueryStatus.FAILED: set_and_log_cache( cache_manager.data_cache, cache_key, @@ -605,7 +606,7 @@ def json_dumps(query_obj: Any, sort_keys: bool = False) -> str: @staticmethod def has_error(payload: VizPayload) -> bool: return ( - payload.get("status") == utils.QueryStatus.FAILED + payload.get("status") == QueryStatus.FAILED or payload.get("error") is not None or bool(payload.get("errors")) ) diff --git a/tests/integration_tests/cache_tests.py b/tests/integration_tests/cache_tests.py index 9f0e6d9b29693..b600ab5579975 100644 --- a/tests/integration_tests/cache_tests.py +++ b/tests/integration_tests/cache_tests.py @@ -20,8 +20,8 @@ import pytest from superset import app, db +from 
superset.common.db_query_status import QueryStatus from superset.extensions import cache_manager -from superset.utils.core import QueryStatus from tests.integration_tests.fixtures.birth_names_dashboard import ( load_birth_names_dashboard_with_slices, ) diff --git a/tests/integration_tests/celery_tests.py b/tests/integration_tests/celery_tests.py index eb55c7c924f88..f4224d20afe2b 100644 --- a/tests/integration_tests/celery_tests.py +++ b/tests/integration_tests/celery_tests.py @@ -36,11 +36,11 @@ from tests.integration_tests.conftest import CTAS_SCHEMA_NAME from tests.integration_tests.test_app import app from superset import db, sql_lab +from superset.common.db_query_status import QueryStatus from superset.result_set import SupersetResultSet from superset.db_engine_specs.base import BaseEngineSpec from superset.errors import ErrorLevel, SupersetErrorType from superset.extensions import celery_app -from superset.models.helpers import QueryStatus from superset.models.sql_lab import Query from superset.sql_parse import ParsedQuery, CtasMethod from superset.utils.core import get_example_database, backend diff --git a/tests/integration_tests/core_tests.py b/tests/integration_tests/core_tests.py index 57955c2a7620e..f91ab454f6b2f 100644 --- a/tests/integration_tests/core_tests.py +++ b/tests/integration_tests/core_tests.py @@ -53,6 +53,7 @@ security_manager, sql_lab, ) +from superset.common.db_query_status import QueryStatus from superset.connectors.sqla.models import SqlaTable from superset.db_engine_specs.base import BaseEngineSpec from superset.db_engine_specs.mssql import MssqlEngineSpec @@ -758,7 +759,7 @@ def test_custom_templated_sql_json(self, sql_lab_mock, mock_dt) -> None: self.login() sql = "SELECT '$DATE()' as test" resp = { - "status": utils.QueryStatus.SUCCESS, + "status": QueryStatus.SUCCESS, "query": {"rows": 1}, "data": [{"test": "'1970-01-01'"}], } @@ -1214,7 +1215,7 @@ def test_display_limit(self, mock_results_backend): data = [{"col_0": i} for i in 
range(100)] payload = { - "status": utils.QueryStatus.SUCCESS, + "status": QueryStatus.SUCCESS, "query": {"rows": 100}, "data": data, } @@ -1267,7 +1268,7 @@ def test_results_default_deserialization(self): query = { "database_id": 1, "sql": "SELECT * FROM birth_names LIMIT 100", - "status": utils.QueryStatus.PENDING, + "status": QueryStatus.PENDING, } ( serialized_data, @@ -1279,8 +1280,8 @@ def test_results_default_deserialization(self): ) payload = { "query_id": 1, - "status": utils.QueryStatus.SUCCESS, - "state": utils.QueryStatus.SUCCESS, + "status": QueryStatus.SUCCESS, + "state": QueryStatus.SUCCESS, "data": serialized_data, "columns": all_columns, "selected_columns": selected_columns, @@ -1315,7 +1316,7 @@ def test_results_msgpack_deserialization(self): query = { "database_id": 1, "sql": "SELECT * FROM birth_names LIMIT 100", - "status": utils.QueryStatus.PENDING, + "status": QueryStatus.PENDING, } ( serialized_data, @@ -1327,8 +1328,8 @@ def test_results_msgpack_deserialization(self): ) payload = { "query_id": 1, - "status": utils.QueryStatus.SUCCESS, - "state": utils.QueryStatus.SUCCESS, + "status": QueryStatus.SUCCESS, + "state": QueryStatus.SUCCESS, "data": serialized_data, "columns": all_columns, "selected_columns": selected_columns, diff --git a/tests/integration_tests/model_tests.py b/tests/integration_tests/model_tests.py index e84f9183faf89..e314a1371b418 100644 --- a/tests/integration_tests/model_tests.py +++ b/tests/integration_tests/model_tests.py @@ -31,10 +31,11 @@ import tests.integration_tests.test_app from superset import app, db as metadata_db from superset.db_engine_specs.postgres import PostgresEngineSpec +from superset.common.db_query_status import QueryStatus from superset.models.core import Database from superset.models.slice import Slice from superset.models.sql_types.base import literal_dttm_type_factory -from superset.utils.core import get_example_database, QueryStatus +from superset.utils.core import get_example_database from 
.base_tests import SupersetTestCase from .fixtures.energy_dashboard import load_energy_table_with_slice diff --git a/tests/integration_tests/queries/api_tests.py b/tests/integration_tests/queries/api_tests.py index e734af16543c7..45a807b7b2f8b 100644 --- a/tests/integration_tests/queries/api_tests.py +++ b/tests/integration_tests/queries/api_tests.py @@ -27,8 +27,9 @@ import tests.integration_tests.test_app from superset import db, security_manager +from superset.common.db_query_status import QueryStatus from superset.models.core import Database -from superset.utils.core import get_example_database, get_main_database, QueryStatus +from superset.utils.core import get_example_database, get_main_database from superset.models.sql_lab import Query from tests.integration_tests.base_tests import SupersetTestCase diff --git a/tests/integration_tests/sqllab_tests.py b/tests/integration_tests/sqllab_tests.py index 3657708ac7793..1e9751414688b 100644 --- a/tests/integration_tests/sqllab_tests.py +++ b/tests/integration_tests/sqllab_tests.py @@ -35,8 +35,9 @@ from superset.errors import ErrorLevel, SupersetError, SupersetErrorType from superset.exceptions import SupersetErrorException from superset.models.core import Database -from superset.models.sql_lab import LimitingFactor, Query, SavedQuery +from superset.models.sql_lab import Query, SavedQuery from superset.result_set import SupersetResultSet +from superset.sqllab.limiting_factor import LimitingFactor from superset.sql_lab import ( cancel_query, execute_sql_statements,