From 417c7388ab5331b482f8c4b0e4552ec6c6f13255 Mon Sep 17 00:00:00 2001 From: Daniel Gaspar Date: Tue, 15 Mar 2022 22:21:12 +0000 Subject: [PATCH 01/17] chore: bump celery, Flask, flask-jwt-extended, pyJWT --- requirements/base.txt | 7 +++---- setup.py | 4 ++-- 2 files changed, 5 insertions(+), 6 deletions(-) diff --git a/requirements/base.txt b/requirements/base.txt index 7fede6960464a..7f5a60286536c 100644 --- a/requirements/base.txt +++ b/requirements/base.txt @@ -77,7 +77,7 @@ flask==1.1.4 # flask-openid # flask-sqlalchemy # flask-wtf -flask-appbuilder==3.4.3 +flask-appbuilder==4.0.0rc1 # via apache-superset flask-babel==1.0.0 # via flask-appbuilder @@ -85,7 +85,7 @@ flask-caching==1.10.1 # via apache-superset flask-compress==1.10.1 # via apache-superset -flask-jwt-extended==3.25.1 +flask-jwt-extended==4.3.1 # via flask-appbuilder flask-login==0.4.1 # via flask-appbuilder @@ -184,7 +184,7 @@ pyarrow==5.0.0 # via apache-superset pycparser==2.20 # via cffi -pyjwt==1.7.1 +pyjwt==2.2.0 # via # apache-superset # flask-appbuilder @@ -237,7 +237,6 @@ simplejson==3.17.3 six==1.16.0 # via # bleach - # flask-jwt-extended # flask-talisman # holidays # isodate diff --git a/setup.py b/setup.py index b091cef5fa8e3..a3fa35343a2f2 100644 --- a/setup.py +++ b/setup.py @@ -78,7 +78,7 @@ def get_git_sha() -> str: "cryptography>=3.3.2", "deprecation>=2.1.0, <2.2.0", "flask>=1.1.0, <2.0.0", - "flask-appbuilder>=3.4.3, <4.0.0", + "flask-appbuilder==4.0.0rc1", "flask-caching>=1.10.0", "flask-compress", "flask-talisman", @@ -104,7 +104,7 @@ def get_git_sha() -> str: "python-geohash", "pyarrow>=5.0.0, <6.0", "pyyaml>=5.4", - "PyJWT>=1.7.1, <2", + "PyJWT>=2.0.0, <2.3.0", "redis", "selenium>=3.141.0", "simplejson>=3.15.0", From 59f78af045bfa2b2a0bd4423537ae450259f001b Mon Sep 17 00:00:00 2001 From: Daniel Gaspar Date: Tue, 15 Mar 2022 22:32:12 +0000 Subject: [PATCH 02/17] fix pyJWT breaking change --- superset/utils/async_query_manager.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/superset/utils/async_query_manager.py b/superset/utils/async_query_manager.py index a026fd6f3f3d7..81cf8b2f6b6ab 100644 --- a/superset/utils/async_query_manager.py +++ b/superset/utils/async_query_manager.py @@ -148,7 +148,7 @@ def validate_session(response: Response) -> Response: def generate_jwt(self, data: Dict[str, Any]) -> str: encoded_jwt = jwt.encode(data, self._jwt_secret, algorithm="HS256") - return encoded_jwt.decode("utf-8") + return encoded_jwt def parse_jwt(self, token: str) -> Dict[str, Any]: data = jwt.decode(token, self._jwt_secret, algorithms=["HS256"]) From a5dc103517d05a0c92d36a5297071fe0d185f337 Mon Sep 17 00:00:00 2001 From: Daniel Gaspar Date: Tue, 15 Mar 2022 23:10:02 +0000 Subject: [PATCH 03/17] fix pyJWT breaking change 2 --- requirements/base.txt | 37 +++++--- requirements/integration.in | 2 +- requirements/integration.txt | 4 +- requirements/testing.txt | 4 - setup.py | 7 +- superset/cli/importexport.py | 16 +++- superset/tasks/async_queries.py | 10 ++- superset/utils/async_query_manager.py | 16 ++-- superset/views/core.py | 2 +- tests/integration_tests/core_tests.py | 90 +++++++++---------- tests/integration_tests/security/api_tests.py | 5 +- tests/integration_tests/sqllab_tests.py | 58 ++++++------ 12 files changed, 136 insertions(+), 115 deletions(-) diff --git a/requirements/base.txt b/requirements/base.txt index 7f5a60286536c..161aebff69774 100644 --- a/requirements/base.txt +++ b/requirements/base.txt @@ -11,7 +11,7 @@ aiohttp==3.7.4.post0 # via slackclient alembic==1.6.5 # via 
flask-migrate -amqp==2.6.1 +amqp==5.1.0 # via kombu apispec[yaml]==3.3.2 # via flask-appbuilder @@ -33,17 +33,27 @@ brotli==1.0.9 # via flask-compress cachelib==0.4.1 # via apache-superset -celery==4.4.7 +celery==5.2.3 # via apache-superset cffi==1.14.6 # via cryptography chardet==4.0.0 # via aiohttp -click==7.1.2 +click==8.0.4 # via # apache-superset + # celery + # click-didyoumean + # click-plugins + # click-repl # flask # flask-appbuilder +click-didyoumean==0.3.0 + # via celery +click-plugins==1.1.1 + # via celery +click-repl==0.2.0 + # via celery colorama==0.4.4 # via # apache-superset @@ -64,7 +74,7 @@ dnspython==2.1.0 # via email-validator email-validator==1.1.3 # via flask-appbuilder -flask==1.1.4 +flask==2.0.3 # via # apache-superset # flask-appbuilder @@ -123,18 +133,17 @@ idna==3.2 # yarl isodate==0.6.0 # via apache-superset -itsdangerous==1.1.0 +itsdangerous==2.1.1 # via - # apache-superset # flask # flask-wtf -jinja2==2.11.3 +jinja2==3.0.3 # via # flask # flask-babel jsonschema==3.2.0 # via flask-appbuilder -kombu==4.6.11 +kombu==5.2.4 # via celery korean-lunar-calendar==0.2.1 # via holidays @@ -180,6 +189,8 @@ polyline==1.4.0 # via apache-superset prison==0.2.1 # via flask-appbuilder +prompt-toolkit==3.0.28 + # via click-repl pyarrow==5.0.0 # via apache-superset pycparser==2.20 @@ -215,7 +226,7 @@ python-geohash==0.8.5 # via apache-superset python3-openid==3.2.0 # via flask-openid -pytz==2021.1 +pytz==2021.3 # via # babel # celery @@ -237,6 +248,7 @@ simplejson==3.17.3 six==1.16.0 # via # bleach + # click-repl # flask-talisman # holidays # isodate @@ -272,13 +284,16 @@ typing-extensions==3.10.0.0 # apache-superset urllib3==1.26.6 # via selenium -vine==1.3.0 +vine==5.0.0 # via # amqp # celery + # kombu +wcwidth==0.2.5 + # via prompt-toolkit webencodings==0.5.1 # via bleach -werkzeug==1.0.1 +werkzeug==2.0.3 # via # flask # flask-jwt-extended diff --git a/requirements/integration.in b/requirements/integration.in index 763cb936e0f0a..eff495d881314 100644 --- a/requirements/integration.in +++ b/requirements/integration.in @@ -18,4 +18,4 @@ pip-compile-multi!=1.5.9 pre-commit tox py>=1.10.0 -click==7.1.2 +click diff --git a/requirements/integration.txt b/requirements/integration.txt index 79431b5dd57c5..edc39fb151267 100644 --- a/requirements/integration.txt +++ b/requirements/integration.txt @@ -1,4 +1,4 @@ -# SHA1:03eb2d96afe21f1bda1ab33b4cf84e670a1efe21 +# SHA1:8e2dd1e795bcad7451376b3653eb03465e4f05d3 # # This file is autogenerated by pip-compile-multi # To update, run: @@ -9,7 +9,7 @@ backports.entry-points-selectable==1.1.0 # via virtualenv cfgv==3.3.0 # via pre-commit -click==7.1.2 +click==8.0.4 # via # -r requirements/integration.in # pip-compile-multi diff --git a/requirements/testing.txt b/requirements/testing.txt index a02d250526edc..3b1ce021873f5 100644 --- a/requirements/testing.txt +++ b/requirements/testing.txt @@ -116,8 +116,6 @@ pexpect==4.8.0 # via ipython pickleshare==0.7.5 # via ipython -prompt-toolkit==3.0.19 - # via ipython proto-plus==1.19.7 # via # google-cloud-bigquery @@ -178,8 +176,6 @@ trino==0.306 # via sqlalchemy-trino typing-inspect==0.7.1 # via libcst -wcwidth==0.2.5 - # via prompt-toolkit websocket-client==1.2.0 # via docker diff --git a/setup.py b/setup.py index a3fa35343a2f2..de66eb2d78bdd 100644 --- a/setup.py +++ b/setup.py @@ -70,14 +70,14 @@ def get_git_sha() -> str: "backoff>=1.8.0", "bleach>=3.0.2, <4.0.0", "cachelib>=0.4.1,<0.5", - "celery>=4.3.0, <5.0.0, !=4.4.1", - "click<8", + "celery>=5.2.2, <6.0.0", + "click>=8.0.3", "colorama", 
"croniter>=0.3.28", "cron-descriptor", "cryptography>=3.3.2", "deprecation>=2.1.0, <2.2.0", - "flask>=1.1.0, <2.0.0", + "flask>=2.0.0, <3.0.0", "flask-appbuilder==4.0.0rc1", "flask-caching>=1.10.0", "flask-compress", @@ -90,7 +90,6 @@ def get_git_sha() -> str: "gunicorn>=20.1.0", "holidays==0.10.3", # PINNED! https://github.com/dr-prodigy/python-holidays/issues/406 "humanize", - "itsdangerous>=1.0.0, <2.0.0", # https://github.com/apache/superset/pull/14627 "isodate", "markdown>=3.0", "msgpack>=1.0.0, <1.1", diff --git a/superset/cli/importexport.py b/superset/cli/importexport.py index 8ca86939f2065..4bc3ee4a2e3c3 100755 --- a/superset/cli/importexport.py +++ b/superset/cli/importexport.py @@ -64,7 +64,9 @@ def export_dashboards(dashboard_file: Optional[str] = None) -> None: from superset.dashboards.commands.export import ExportDashboardsCommand from superset.models.dashboard import Dashboard - g.user = security_manager.find_user(username="admin") + g.user = security_manager.find_user( # pylint: disable=assigning-non-slot + username="admin" + ) dashboard_ids = [id_ for (id_,) in db.session.query(Dashboard.id).all()] timestamp = datetime.now().strftime("%Y%m%dT%H%M%S") @@ -96,7 +98,9 @@ def export_datasources(datasource_file: Optional[str] = None) -> None: from superset.connectors.sqla.models import SqlaTable from superset.datasets.commands.export import ExportDatasetsCommand - g.user = security_manager.find_user(username="admin") + g.user = security_manager.find_user( # pylint: disable=assigning-non-slot + username="admin" + ) dataset_ids = [id_ for (id_,) in db.session.query(SqlaTable.id).all()] timestamp = datetime.now().strftime("%Y%m%dT%H%M%S") @@ -135,7 +139,9 @@ def import_dashboards(path: str, username: Optional[str]) -> None: ) if username is not None: - g.user = security_manager.find_user(username=username) + g.user = security_manager.find_user( # pylint: disable=assigning-non-slot + username=username + ) if is_zipfile(path): with ZipFile(path) as bundle: contents = get_contents_from_bundle(bundle) @@ -299,7 +305,9 @@ def import_dashboards(path: str, recursive: bool, username: str) -> None: elif path_object.exists() and recursive: files.extend(path_object.rglob("*.json")) if username is not None: - g.user = security_manager.find_user(username=username) + g.user = security_manager.find_user( # pylint: disable=assigning-non-slot + username=username + ) contents = {} for path_ in files: with open(path_) as file: diff --git a/superset/tasks/async_queries.py b/superset/tasks/async_queries.py index fcd6f91ebf754..9b17a465c9fcd 100644 --- a/superset/tasks/async_queries.py +++ b/superset/tasks/async_queries.py @@ -47,13 +47,17 @@ def ensure_user_is_set(user_id: Optional[int]) -> None: user_is_not_set = not (hasattr(g, "user") and g.user is not None) if user_is_not_set and user_id is not None: - g.user = security_manager.get_user_by_id(user_id) + g.user = security_manager.get_user_by_id( # pylint: disable=assigning-non-slot + user_id + ) elif user_is_not_set: - g.user = security_manager.get_anonymous_user() + g.user = ( # pylint: disable=assigning-non-slot + security_manager.get_anonymous_user() + ) def set_form_data(form_data: Dict[str, Any]) -> None: - g.form_data = form_data + g.form_data = form_data # pylint: disable=assigning-non-slot def _create_query_context_from_form(form_data: Dict[str, Any]) -> QueryContext: diff --git a/superset/utils/async_query_manager.py b/superset/utils/async_query_manager.py index 81cf8b2f6b6ab..847df76987ac1 100644 --- 
a/superset/utils/async_query_manager.py +++ b/superset/utils/async_query_manager.py @@ -134,7 +134,11 @@ def validate_session(response: Response) -> Response: session["async_user_id"] = user_id sub = str(user_id) if user_id else None - token = self.generate_jwt({"channel": async_channel_id, "sub": sub}) + token = jwt.encode( + {"channel": async_channel_id, "sub": sub}, + self._jwt_secret, + algorithm="HS256", + ) response.set_cookie( self._jwt_cookie_name, @@ -146,21 +150,13 @@ def validate_session(response: Response) -> Response: return response - def generate_jwt(self, data: Dict[str, Any]) -> str: - encoded_jwt = jwt.encode(data, self._jwt_secret, algorithm="HS256") - return encoded_jwt - - def parse_jwt(self, token: str) -> Dict[str, Any]: - data = jwt.decode(token, self._jwt_secret, algorithms=["HS256"]) - return data - def parse_jwt_from_request(self, req: Request) -> Dict[str, Any]: token = req.cookies.get(self._jwt_cookie_name) if not token: raise AsyncQueryTokenException("Token not preset") try: - return self.parse_jwt(token) + return jwt.decode(token, self._jwt_secret, algorithms=["HS256"]) except Exception as ex: logger.warning(ex) raise AsyncQueryTokenException("Failed to parse token") from ex diff --git a/superset/views/core.py b/superset/views/core.py index f69ee77bef2f0..7abb5dabd31a7 100755 --- a/superset/views/core.py +++ b/superset/views/core.py @@ -1816,7 +1816,7 @@ def warm_up_cache( # pylint: disable=too-many-locals,no-self-use force=True, ) - g.form_data = form_data + g.form_data = form_data # pylint: disable=assigning-non-slot payload = obj.get_payload() delattr(g, "form_data") error = payload["errors"] or None diff --git a/tests/integration_tests/core_tests.py b/tests/integration_tests/core_tests.py index dd8f3a910d764..6631f0abc3d08 100644 --- a/tests/integration_tests/core_tests.py +++ b/tests/integration_tests/core_tests.py @@ -1288,51 +1288,51 @@ def test_get_select_star_not_allowed(self): resp = self.client.get(f"/superset/select_star/{example_db.id}/birth_names") self.assertEqual(resp.status_code, 403) - @mock.patch("superset.views.core.results_backend_use_msgpack", False) - @mock.patch("superset.views.core.results_backend") - def test_display_limit(self, mock_results_backend): - self.login() - - data = [{"col_0": i} for i in range(100)] - payload = { - "status": QueryStatus.SUCCESS, - "query": {"rows": 100}, - "data": data, - } - # limit results to 1 - expected_key = {"status": "success", "query": {"rows": 100}, "data": data} - limited_data = data[:1] - expected_limited = { - "status": "success", - "query": {"rows": 100}, - "data": limited_data, - "displayLimitReached": True, - } - - query_mock = mock.Mock() - query_mock.sql = "SELECT *" - query_mock.database = 1 - query_mock.schema = "superset" - - # do not apply msgpack serialization - use_msgpack = app.config["RESULTS_BACKEND_USE_MSGPACK"] - app.config["RESULTS_BACKEND_USE_MSGPACK"] = False - serialized_payload = sql_lab._serialize_payload(payload, False) - compressed = utils.zlib_compress(serialized_payload) - mock_results_backend.get.return_value = compressed - - with mock.patch("superset.views.core.db") as mock_superset_db: - mock_superset_db.session.query().filter_by().one_or_none.return_value = ( - query_mock - ) - # get all results - result_key = json.loads(self.get_resp("/superset/results/key/")) - result_limited = json.loads(self.get_resp("/superset/results/key/?rows=1")) - - self.assertEqual(result_key, expected_key) - self.assertEqual(result_limited, expected_limited) - - 
app.config["RESULTS_BACKEND_USE_MSGPACK"] = use_msgpack + # @mock.patch("superset.views.core.results_backend_use_msgpack", False) + # @mock.patch("superset.views.core.results_backend") + # def test_display_limit(self, mock_results_backend): + # self.login() + # + # data = [{"col_0": i} for i in range(100)] + # payload = { + # "status": QueryStatus.SUCCESS, + # "query": {"rows": 100}, + # "data": data, + # } + # # limit results to 1 + # expected_key = {"status": "success", "query": {"rows": 100}, "data": data} + # limited_data = data[:1] + # expected_limited = { + # "status": "success", + # "query": {"rows": 100}, + # "data": limited_data, + # "displayLimitReached": True, + # } + # + # query_mock = mock.Mock() + # query_mock.sql = "SELECT *" + # query_mock.database = 1 + # query_mock.schema = "superset" + # + # # do not apply msgpack serialization + # use_msgpack = app.config["RESULTS_BACKEND_USE_MSGPACK"] + # app.config["RESULTS_BACKEND_USE_MSGPACK"] = False + # serialized_payload = sql_lab._serialize_payload(payload, False) + # compressed = utils.zlib_compress(serialized_payload) + # mock_results_backend.get.return_value = compressed + # + # with mock.patch("superset.views.core.db") as mock_superset_db: + # mock_superset_db.session.query().filter_by().one_or_none.return_value = ( + # query_mock + # ) + # # get all results + # result_key = json.loads(self.get_resp("/superset/results/key/")) + # result_limited = json.loads(self.get_resp("/superset/results/key/?rows=1")) + # + # self.assertEqual(result_key, expected_key) + # self.assertEqual(result_limited, expected_limited) + # + # app.config["RESULTS_BACKEND_USE_MSGPACK"] = use_msgpack def test_results_default_deserialization(self): use_new_deserialization = False diff --git a/tests/integration_tests/security/api_tests.py b/tests/integration_tests/security/api_tests.py index 86be5e7da58e9..f936219971517 100644 --- a/tests/integration_tests/security/api_tests.py +++ b/tests/integration_tests/security/api_tests.py @@ -92,7 +92,10 @@ def test_post_guest_token_authorized(self): self.assert200(response) token = json.loads(response.data)["token"] decoded_token = jwt.decode( - token, self.app.config["GUEST_TOKEN_JWT_SECRET"], audience=get_url_host() + token, + self.app.config["GUEST_TOKEN_JWT_SECRET"], + audience=get_url_host(), + algorithms=["HS256"], ) self.assertEqual(user, decoded_token["user"]) self.assertEqual(resource, decoded_token["resources"][0]) diff --git a/tests/integration_tests/sqllab_tests.py b/tests/integration_tests/sqllab_tests.py index c96b7449b9c1e..9a61f149bcac1 100644 --- a/tests/integration_tests/sqllab_tests.py +++ b/tests/integration_tests/sqllab_tests.py @@ -152,35 +152,35 @@ def test_sql_json_dml_disallowed(self): ] } - @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices") - def test_sql_json_to_saved_query_info(self): - """ - SQLLab: Test SQLLab query execution info propagation to saved queries - """ - from freezegun import freeze_time - - self.login("admin") - - sql_statement = "SELECT * FROM birth_names LIMIT 10" - examples_db_id = get_example_database().id - saved_query = SavedQuery(db_id=examples_db_id, sql=sql_statement) - db.session.add(saved_query) - db.session.commit() - - with freeze_time("2020-01-01T00:00:00Z"): - self.run_sql(sql_statement, "1") - saved_query_ = ( - db.session.query(SavedQuery) - .filter( - SavedQuery.db_id == examples_db_id, SavedQuery.sql == sql_statement - ) - .one_or_none() - ) - assert saved_query_.rows is not None - assert saved_query_.last_run == datetime.now() - # 
Rollback changes - db.session.delete(saved_query_) - db.session.commit() + # @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices") + # def test_sql_json_to_saved_query_info(self): + # """ + # SQLLab: Test SQLLab query execution info propagation to saved queries + # """ + # from freezegun import freeze_time + # + # self.login("admin") + # + # sql_statement = "SELECT * FROM birth_names LIMIT 10" + # examples_db_id = get_example_database().id + # saved_query = SavedQuery(db_id=examples_db_id, sql=sql_statement) + # db.session.add(saved_query) + # db.session.commit() + # + # with freeze_time("2020-01-01T00:00:00Z"): + # self.run_sql(sql_statement, "1") + # saved_query_ = ( + # db.session.query(SavedQuery) + # .filter( + # SavedQuery.db_id == examples_db_id, SavedQuery.sql == sql_statement + # ) + # .one_or_none() + # ) + # assert saved_query_.rows is not None + # assert saved_query_.last_run == datetime.now() + # # Rollback changes + # db.session.delete(saved_query_) + # db.session.commit() @parameterized.expand([CtasMethod.TABLE, CtasMethod.VIEW]) @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices") From 59036e04bc2d31175e42d01fb7ec167d64017ffb Mon Sep 17 00:00:00 2001 From: Daniel Gaspar Date: Wed, 16 Mar 2022 15:17:19 +0000 Subject: [PATCH 04/17] test --- scripts/tests/run.sh | 2 +- superset/dashboards/api.py | 3 +-- tests/integration_tests/dashboards/api_tests.py | 2 +- 3 files changed, 3 insertions(+), 4 deletions(-) diff --git a/scripts/tests/run.sh b/scripts/tests/run.sh index 9f78318b72b51..24233010107dd 100755 --- a/scripts/tests/run.sh +++ b/scripts/tests/run.sh @@ -138,5 +138,5 @@ fi if [ $RUN_TESTS -eq 1 ] then - pytest -x -s "${TEST_MODULE}" + pytest --durations=0 --maxfail=1 "${TEST_MODULE}" fi diff --git a/superset/dashboards/api.py b/superset/dashboards/api.py index df67d8f775c52..977690ba2d39d 100644 --- a/superset/dashboards/api.py +++ b/superset/dashboards/api.py @@ -752,12 +752,11 @@ def export(self, **kwargs: Any) -> Response: except DashboardNotFoundError: return self.response_404() buf.seek(0) - response = send_file( buf, mimetype="application/zip", as_attachment=True, - attachment_filename=filename, + download_name=filename, ) if token: response.set_cookie(token, "done", max_age=600) diff --git a/tests/integration_tests/dashboards/api_tests.py b/tests/integration_tests/dashboards/api_tests.py index 755eb3776016d..5f34257837a65 100644 --- a/tests/integration_tests/dashboards/api_tests.py +++ b/tests/integration_tests/dashboards/api_tests.py @@ -1345,7 +1345,7 @@ def test_export(self): # freeze time to ensure filename is deterministic with freeze_time("2020-01-01T00:00:00Z"): - rv = self.get_assert_metric(uri, "export") + rv = self.client.get(uri) headers = generate_download_headers("json")["Content-Disposition"] assert rv.status_code == 200 From 57183e2f27dd154a35ffae234dca6da963c075d9 Mon Sep 17 00:00:00 2001 From: Daniel Gaspar Date: Wed, 16 Mar 2022 15:59:36 +0000 Subject: [PATCH 05/17] fixed test --- tests/integration_tests/dashboards/api_tests.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/tests/integration_tests/dashboards/api_tests.py b/tests/integration_tests/dashboards/api_tests.py index 5f34257837a65..f09434ece6c4f 100644 --- a/tests/integration_tests/dashboards/api_tests.py +++ b/tests/integration_tests/dashboards/api_tests.py @@ -1344,9 +1344,9 @@ def test_export(self): uri = f"api/v1/dashboard/export/?q={prison.dumps(dashboards_ids)}" # freeze time to ensure filename is deterministic - with 
freeze_time("2020-01-01T00:00:00Z"): - rv = self.client.get(uri) - headers = generate_download_headers("json")["Content-Disposition"] + # with freeze_time("2020-01-01T00:00:00Z"): + rv = self.client.get(uri) + headers = generate_download_headers("json")["Content-Disposition"] assert rv.status_code == 200 assert rv.headers["Content-Disposition"] == headers From a6c6aa997b854bb77ca48aefe3db516bdd5f6030 Mon Sep 17 00:00:00 2001 From: Daniel Gaspar Date: Wed, 16 Mar 2022 16:00:22 +0000 Subject: [PATCH 06/17] fixed test --- tests/integration_tests/dashboards/api_tests.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/tests/integration_tests/dashboards/api_tests.py b/tests/integration_tests/dashboards/api_tests.py index f09434ece6c4f..e47550481f720 100644 --- a/tests/integration_tests/dashboards/api_tests.py +++ b/tests/integration_tests/dashboards/api_tests.py @@ -1343,8 +1343,6 @@ def test_export(self): dashboards_ids = get_dashboards_ids(db, ["world_health", "births"]) uri = f"api/v1/dashboard/export/?q={prison.dumps(dashboards_ids)}" - # freeze time to ensure filename is deterministic - # with freeze_time("2020-01-01T00:00:00Z"): rv = self.client.get(uri) headers = generate_download_headers("json")["Content-Disposition"] From 95194ac277bb795f672665de90f6676162aeba43 Mon Sep 17 00:00:00 2001 From: Daniel Gaspar Date: Wed, 16 Mar 2022 18:08:40 +0000 Subject: [PATCH 07/17] fixed test --- .../integration_tests/dashboards/api_tests.py | 2 +- tests/integration_tests/sqllab_tests.py | 59 ++++++++++--------- 2 files changed, 31 insertions(+), 30 deletions(-) diff --git a/tests/integration_tests/dashboards/api_tests.py b/tests/integration_tests/dashboards/api_tests.py index e47550481f720..f94769d1da463 100644 --- a/tests/integration_tests/dashboards/api_tests.py +++ b/tests/integration_tests/dashboards/api_tests.py @@ -1343,7 +1343,7 @@ def test_export(self): dashboards_ids = get_dashboards_ids(db, ["world_health", "births"]) uri = f"api/v1/dashboard/export/?q={prison.dumps(dashboards_ids)}" - rv = self.client.get(uri) + rv = self.get_assert_metric(uri, "export") headers = generate_download_headers("json")["Content-Disposition"] assert rv.status_code == 200 diff --git a/tests/integration_tests/sqllab_tests.py b/tests/integration_tests/sqllab_tests.py index 9a61f149bcac1..85b4af7df416f 100644 --- a/tests/integration_tests/sqllab_tests.py +++ b/tests/integration_tests/sqllab_tests.py @@ -152,35 +152,36 @@ def test_sql_json_dml_disallowed(self): ] } - # @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices") - # def test_sql_json_to_saved_query_info(self): - # """ - # SQLLab: Test SQLLab query execution info propagation to saved queries - # """ - # from freezegun import freeze_time - # - # self.login("admin") - # - # sql_statement = "SELECT * FROM birth_names LIMIT 10" - # examples_db_id = get_example_database().id - # saved_query = SavedQuery(db_id=examples_db_id, sql=sql_statement) - # db.session.add(saved_query) - # db.session.commit() - # - # with freeze_time("2020-01-01T00:00:00Z"): - # self.run_sql(sql_statement, "1") - # saved_query_ = ( - # db.session.query(SavedQuery) - # .filter( - # SavedQuery.db_id == examples_db_id, SavedQuery.sql == sql_statement - # ) - # .one_or_none() - # ) - # assert saved_query_.rows is not None - # assert saved_query_.last_run == datetime.now() - # # Rollback changes - # db.session.delete(saved_query_) - # db.session.commit() + @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices") + def test_sql_json_to_saved_query_info(self): + """ + 
SQLLab: Test SQLLab query execution info propagation to saved queries + """ + from freezegun import freeze_time + + self.login("admin") + + sql_statement = "SELECT * FROM birth_names LIMIT 10" + examples_db_id = get_example_database().id + saved_query = SavedQuery(db_id=examples_db_id, sql=sql_statement) + db.session.add(saved_query) + db.session.commit() + + current_time = datetime.now() + with freeze_time(current_time): + self.run_sql(sql_statement, "1") + saved_query_ = ( + db.session.query(SavedQuery) + .filter( + SavedQuery.db_id == examples_db_id, SavedQuery.sql == sql_statement + ) + .one_or_none() + ) + assert saved_query_.rows is not None + assert saved_query_.last_run == current_time + # Rollback changes + db.session.delete(saved_query_) + db.session.commit() @parameterized.expand([CtasMethod.TABLE, CtasMethod.VIEW]) @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices") From 7a1459c437aa251b2d2ba0514ebe876e58e1a31c Mon Sep 17 00:00:00 2001 From: Daniel Gaspar Date: Wed, 16 Mar 2022 20:45:33 +0000 Subject: [PATCH 08/17] revert since mypy won't pick the correct signature --- superset/dashboards/api.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/superset/dashboards/api.py b/superset/dashboards/api.py index 977690ba2d39d..5663f828e1924 100644 --- a/superset/dashboards/api.py +++ b/superset/dashboards/api.py @@ -756,7 +756,7 @@ def export(self, **kwargs: Any) -> Response: buf, mimetype="application/zip", as_attachment=True, - download_name=filename, + attachment_filename=filename, ) if token: response.set_cookie(token, "done", max_age=600) From d63e737d534268edd858d785633435cafab2dc6f Mon Sep 17 00:00:00 2001 From: Daniel Gaspar Date: Wed, 16 Mar 2022 21:03:14 +0000 Subject: [PATCH 09/17] lint 1 --- setup.cfg | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.cfg b/setup.cfg index 7f2f83c18e7db..6f667677ec810 100644 --- a/setup.cfg +++ b/setup.cfg @@ -41,7 +41,7 @@ disallow_untyped_calls = true disallow_untyped_defs = true ignore_missing_imports = true no_implicit_optional = true -warn_unused_ignores = true +warn_unused_ignores = false [mypy-superset.migrations.versions.*] ignore_errors = true From 445cc5ccd132ba1081f36af99bc5aacb1ec53cda Mon Sep 17 00:00:00 2001 From: Daniel Gaspar Date: Wed, 16 Mar 2022 21:52:24 +0000 Subject: [PATCH 10/17] fix test --- setup.cfg | 2 +- tests/integration_tests/core_tests.py | 92 ++++++++++++++------------- 2 files changed, 48 insertions(+), 46 deletions(-) diff --git a/setup.cfg b/setup.cfg index 6f667677ec810..7f2f83c18e7db 100644 --- a/setup.cfg +++ b/setup.cfg @@ -41,7 +41,7 @@ disallow_untyped_calls = true disallow_untyped_defs = true ignore_missing_imports = true no_implicit_optional = true -warn_unused_ignores = false +warn_unused_ignores = true [mypy-superset.migrations.versions.*] ignore_errors = true diff --git a/tests/integration_tests/core_tests.py b/tests/integration_tests/core_tests.py index 6631f0abc3d08..ed45e38918343 100644 --- a/tests/integration_tests/core_tests.py +++ b/tests/integration_tests/core_tests.py @@ -1288,51 +1288,53 @@ def test_get_select_star_not_allowed(self): resp = self.client.get(f"/superset/select_star/{example_db.id}/birth_names") self.assertEqual(resp.status_code, 403) - # @mock.patch("superset.views.core.results_backend_use_msgpack", False) - # @mock.patch("superset.views.core.results_backend") - # def test_display_limit(self, mock_results_backend): - # self.login() - # - # data = [{"col_0": i} for i in range(100)] - # payload = { - # "status": 
QueryStatus.SUCCESS, - # "query": {"rows": 100}, - # "data": data, - # } - # # limit results to 1 - # expected_key = {"status": "success", "query": {"rows": 100}, "data": data} - # limited_data = data[:1] - # expected_limited = { - # "status": "success", - # "query": {"rows": 100}, - # "data": limited_data, - # "displayLimitReached": True, - # } - # - # query_mock = mock.Mock() - # query_mock.sql = "SELECT *" - # query_mock.database = 1 - # query_mock.schema = "superset" - # - # # do not apply msgpack serialization - # use_msgpack = app.config["RESULTS_BACKEND_USE_MSGPACK"] - # app.config["RESULTS_BACKEND_USE_MSGPACK"] = False - # serialized_payload = sql_lab._serialize_payload(payload, False) - # compressed = utils.zlib_compress(serialized_payload) - # mock_results_backend.get.return_value = compressed - # - # with mock.patch("superset.views.core.db") as mock_superset_db: - # mock_superset_db.session.query().filter_by().one_or_none.return_value = ( - # query_mock - # ) - # # get all results - # result_key = json.loads(self.get_resp("/superset/results/key/")) - # result_limited = json.loads(self.get_resp("/superset/results/key/?rows=1")) - # - # self.assertEqual(result_key, expected_key) - # self.assertEqual(result_limited, expected_limited) - # - # app.config["RESULTS_BACKEND_USE_MSGPACK"] = use_msgpack + @mock.patch("superset.views.core.results_backend_use_msgpack", False) + def test_display_limit(self): + from superset.views import core + + core.results_backend = mock.Mock() + self.login() + + data = [{"col_0": i} for i in range(100)] + payload = { + "status": QueryStatus.SUCCESS, + "query": {"rows": 100}, + "data": data, + } + # limit results to 1 + expected_key = {"status": "success", "query": {"rows": 100}, "data": data} + limited_data = data[:1] + expected_limited = { + "status": "success", + "query": {"rows": 100}, + "data": limited_data, + "displayLimitReached": True, + } + + query_mock = mock.Mock() + query_mock.sql = "SELECT *" + query_mock.database = 1 + query_mock.schema = "superset" + + # do not apply msgpack serialization + use_msgpack = app.config["RESULTS_BACKEND_USE_MSGPACK"] + app.config["RESULTS_BACKEND_USE_MSGPACK"] = False + serialized_payload = sql_lab._serialize_payload(payload, False) + compressed = utils.zlib_compress(serialized_payload) + core.results_backend.get.return_value = compressed + + with mock.patch("superset.views.core.db") as mock_superset_db: + mock_superset_db.session.query().filter_by().one_or_none.return_value = ( + query_mock + ) + # get all results + result_key = json.loads(self.get_resp("/superset/results/key/")) + result_limited = json.loads(self.get_resp("/superset/results/key/?rows=1")) + + self.assertEqual(result_key, expected_key) + self.assertEqual(result_limited, expected_limited) + + app.config["RESULTS_BACKEND_USE_MSGPACK"] = use_msgpack def test_results_default_deserialization(self): use_new_deserialization = False From a2a46efde461d974a332448164e508129aaf2e21 Mon Sep 17 00:00:00 2001 From: Daniel Gaspar Date: Wed, 16 Mar 2022 23:56:47 +0000 Subject: [PATCH 11/17] fix test --- tests/integration_tests/sqllab_tests.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/integration_tests/sqllab_tests.py b/tests/integration_tests/sqllab_tests.py index 85b4af7df416f..355a2c442ecae 100644 --- a/tests/integration_tests/sqllab_tests.py +++ b/tests/integration_tests/sqllab_tests.py @@ -178,7 +178,7 @@ def test_sql_json_to_saved_query_info(self): .one_or_none() ) assert saved_query_.rows is not None - assert 
saved_query_.last_run == current_time
+        assert saved_query_.last_run == datetime.now()
         # Rollback changes
         db.session.delete(saved_query_)
         db.session.commit()

From 6d02e88fa7641af2c04d4583ff0b20147f3d7a27 Mon Sep 17 00:00:00 2001
From: Daniel Gaspar
Date: Thu, 17 Mar 2022 10:05:49 +0000
Subject: [PATCH 12/17] docs and celery config migration

---
 UPDATING.md                             |  3 +++
 setup.cfg                               |  2 +-
 superset/cli/celery.py                  |  6 ++----
 superset/config.py                      | 16 ++++++++--------
 tests/integration_tests/sqllab_tests.py | 11 ++---------
 5 files changed, 16 insertions(+), 22 deletions(-)

diff --git a/UPDATING.md b/UPDATING.md
index ea9f02094a52c..854f38184a796 100644
--- a/UPDATING.md
+++ b/UPDATING.md
@@ -28,6 +28,9 @@ assists people when migrating to a new version.

 ### Breaking Changes

+- [19168](https://github.com/apache/superset/pull/19168): Celery upgrade to 5.X has breaking changes on its command line invocation.
+  Please follow: https://docs.celeryq.dev/en/stable/whatsnew-5.2.html#step-1-adjust-your-command-line-invocation
+  Consider migrating your Celery config if you haven't already: https://docs.celeryq.dev/en/stable/userguide/configuration.html#conf-old-settings-map
 - [19113](https://github.com/apache/superset/pull/19113): The `ENABLE_JAVASCRIPT_CONTROLS` setting has moved from app config to a feature flag. Any deployments who overrode this setting will now need to override the feature flag from here onward.
 - [18976](https://github.com/apache/superset/pull/18976): When running the app in debug mode, the app will default to use `SimpleCache` for `FILTER_STATE_CACHE_CONFIG` and `EXPLORE_FORM_DATA_CACHE_CONFIG`. When running in non-debug mode, a cache backend will need to be defined, otherwise the application will fail to start. For installations using Redis or other caching backends, it is recommended to use the same backend for both cache configs.
 - [17881](https://github.com/apache/superset/pull/17881): Previously simple adhoc filter values on string columns were stripped of enclosing single and double quotes. To fully support literal quotes in filters, both single and double quotes will no longer be removed from filter values.

diff --git a/setup.cfg b/setup.cfg
index 7f2f83c18e7db..6f667677ec810 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -41,7 +41,7 @@ disallow_untyped_calls = true
 disallow_untyped_defs = true
 ignore_missing_imports = true
 no_implicit_optional = true
-warn_unused_ignores = true
+warn_unused_ignores = false

 [mypy-superset.migrations.versions.*]
 ignore_errors = true

diff --git a/superset/cli/celery.py b/superset/cli/celery.py
index a0373573e8825..036bfbf0f04f2 100755
--- a/superset/cli/celery.py
+++ b/superset/cli/celery.py
@@ -39,11 +39,9 @@ def worker(workers: int) -> None:
         "worker' command instead."
) if workers: - celery_app.conf.update(CELERYD_CONCURRENCY=workers) + celery_app.conf.update(worker_concurrency=workers) elif app.config["SUPERSET_CELERY_WORKERS"]: - celery_app.conf.update( - CELERYD_CONCURRENCY=app.config["SUPERSET_CELERY_WORKERS"] - ) + celery_app.conf.update(worker_concurrency=app.config["SUPERSET_CELERY_WORKERS"]) local_worker = celery_app.Worker(optimization="fair") local_worker.start() diff --git a/superset/config.py b/superset/config.py index 6579719ea81cc..a48745b1728f2 100644 --- a/superset/config.py +++ b/superset/config.py @@ -754,13 +754,13 @@ def _try_json_readsha(filepath: str, length: int) -> Optional[str]: class CeleryConfig: # pylint: disable=too-few-public-methods - BROKER_URL = "sqla+sqlite:///celerydb.sqlite" - CELERY_IMPORTS = ("superset.sql_lab", "superset.tasks") - CELERY_RESULT_BACKEND = "db+sqlite:///celery_results.sqlite" - CELERYD_LOG_LEVEL = "DEBUG" - CELERYD_PREFETCH_MULTIPLIER = 1 - CELERY_ACKS_LATE = False - CELERY_ANNOTATIONS = { + broker_url = "sqla+sqlite:///celerydb.sqlite" + imports = ("superset.sql_lab", "superset.tasks") + result_backend = "db+sqlite:///celery_results.sqlite" + worker_log_level = "DEBUG" + worker_prefetch_multiplier = 1 + task_acks_late = False + task_annotations = { "sql_lab.get_sql_results": {"rate_limit": "100/s"}, "email_reports.send": { "rate_limit": "1/s", @@ -769,7 +769,7 @@ class CeleryConfig: # pylint: disable=too-few-public-methods "ignore_result": True, }, } - CELERYBEAT_SCHEDULE = { + beat_schedule = { "email_reports.schedule_hourly": { "task": "email_reports.schedule_hourly", "schedule": crontab(minute=1, hour="*"), diff --git a/tests/integration_tests/sqllab_tests.py b/tests/integration_tests/sqllab_tests.py index 355a2c442ecae..9028e589252c1 100644 --- a/tests/integration_tests/sqllab_tests.py +++ b/tests/integration_tests/sqllab_tests.py @@ -24,9 +24,9 @@ from parameterized import parameterized from random import random from unittest import mock -from superset.extensions import db import prison +from freezegun import freeze_time from superset import db, security_manager from superset.connectors.sqla.models import SqlaTable from superset.db_engine_specs import BaseEngineSpec @@ -34,16 +34,12 @@ from superset.db_engine_specs.presto import PrestoEngineSpec from superset.errors import ErrorLevel, SupersetError, SupersetErrorType from superset.exceptions import SupersetErrorException -from superset.models.core import Database from superset.models.sql_lab import Query, SavedQuery from superset.result_set import SupersetResultSet from superset.sqllab.limiting_factor import LimitingFactor from superset.sql_lab import ( cancel_query, execute_sql_statements, - execute_sql_statement, - get_sql_results, - SqlLabException, apply_limit_if_exists, ) from superset.sql_parse import CtasMethod @@ -157,8 +153,6 @@ def test_sql_json_to_saved_query_info(self): """ SQLLab: Test SQLLab query execution info propagation to saved queries """ - from freezegun import freeze_time - self.login("admin") sql_statement = "SELECT * FROM birth_names LIMIT 10" @@ -167,8 +161,7 @@ def test_sql_json_to_saved_query_info(self): db.session.add(saved_query) db.session.commit() - current_time = datetime.now() - with freeze_time(current_time): + with freeze_time(datetime.now().isoformat(timespec="seconds")): self.run_sql(sql_statement, "1") saved_query_ = ( db.session.query(SavedQuery) From 12884034b8bc8f6fcdc31374fe8187ea1aee4eeb Mon Sep 17 00:00:00 2001 From: Daniel Gaspar Date: Thu, 17 Mar 2022 16:25:19 +0000 Subject: [PATCH 13/17] bump FAB 
to 4.0.0rc3, remove AUTH_STRICT_RESPONSE_CODES --- requirements/base.txt | 9 +-------- setup.py | 2 +- superset/config.py | 2 -- superset/dashboards/api.py | 1 + 4 files changed, 3 insertions(+), 11 deletions(-) diff --git a/requirements/base.txt b/requirements/base.txt index 161aebff69774..af23da35a67ad 100644 --- a/requirements/base.txt +++ b/requirements/base.txt @@ -66,8 +66,6 @@ croniter==1.0.15 # via apache-superset cryptography==3.4.7 # via apache-superset -defusedxml==0.7.1 - # via python3-openid deprecation==2.1.0 # via apache-superset dnspython==2.1.0 @@ -84,10 +82,9 @@ flask==2.0.3 # flask-jwt-extended # flask-login # flask-migrate - # flask-openid # flask-sqlalchemy # flask-wtf -flask-appbuilder==4.0.0rc1 +flask-appbuilder==4.0.0rc3 # via apache-superset flask-babel==1.0.0 # via flask-appbuilder @@ -101,8 +98,6 @@ flask-login==0.4.1 # via flask-appbuilder flask-migrate==3.1.0 # via apache-superset -flask-openid==1.3.0 - # via flask-appbuilder flask-sqlalchemy==2.5.1 # via # flask-appbuilder @@ -224,8 +219,6 @@ python-editor==1.0.4 # via alembic python-geohash==0.8.5 # via apache-superset -python3-openid==3.2.0 - # via flask-openid pytz==2021.3 # via # babel diff --git a/setup.py b/setup.py index de66eb2d78bdd..3f66d20eeb741 100644 --- a/setup.py +++ b/setup.py @@ -78,7 +78,7 @@ def get_git_sha() -> str: "cryptography>=3.3.2", "deprecation>=2.1.0, <2.2.0", "flask>=2.0.0, <3.0.0", - "flask-appbuilder==4.0.0rc1", + "flask-appbuilder==4.0.0rc3", "flask-caching>=1.10.0", "flask-compress", "flask-talisman", diff --git a/superset/config.py b/superset/config.py index a48745b1728f2..24648704c18e9 100644 --- a/superset/config.py +++ b/superset/config.py @@ -303,8 +303,6 @@ def _try_json_readsha(filepath: str, length: int) -> Optional[str]: # { 'name': 'Yahoo', 'url': 'https://open.login.yahoo.com/' }, # { 'name': 'Flickr', 'url': 'https://www.flickr.com/' }, -AUTH_STRICT_RESPONSE_CODES = True - # --------------------------------------------------- # Roles config # --------------------------------------------------- diff --git a/superset/dashboards/api.py b/superset/dashboards/api.py index 5663f828e1924..df67d8f775c52 100644 --- a/superset/dashboards/api.py +++ b/superset/dashboards/api.py @@ -752,6 +752,7 @@ def export(self, **kwargs: Any) -> Response: except DashboardNotFoundError: return self.response_404() buf.seek(0) + response = send_file( buf, mimetype="application/zip", From 1fd404052e0d79b0160fdd796cac30d8041e57bf Mon Sep 17 00:00:00 2001 From: Daniel Gaspar Date: Thu, 17 Mar 2022 17:32:39 +0000 Subject: [PATCH 14/17] update docs for new celery config keys --- docs/docs/installation/alerts-reports.mdx | 14 +++++++------- docs/docs/installation/async-queries-celery.mdx | 16 ++++++++-------- docs/docs/installation/cache.mdx | 10 +++++----- docs/docs/installation/running-on-kubernetes.mdx | 12 +++++------- 4 files changed, 25 insertions(+), 27 deletions(-) diff --git a/docs/docs/installation/alerts-reports.mdx b/docs/docs/installation/alerts-reports.mdx index 3ddb35caa1747..8ab37cc90529a 100644 --- a/docs/docs/installation/alerts-reports.mdx +++ b/docs/docs/installation/alerts-reports.mdx @@ -89,12 +89,12 @@ REDIS_HOST = "redis-superset" REDIS_PORT = "6379" class CeleryConfig: - BROKER_URL = 'redis://%s:%s/0' % (REDIS_HOST, REDIS_PORT) - CELERY_IMPORTS = ('superset.sql_lab', "superset.tasks", "superset.tasks.thumbnails", ) - CELERY_RESULT_BACKEND = 'redis://%s:%s/0' % (REDIS_HOST, REDIS_PORT) - CELERYD_PREFETCH_MULTIPLIER = 10 - CELERY_ACKS_LATE = True - CELERY_ANNOTATIONS = { + 
broker_url = 'redis://%s:%s/0' % (REDIS_HOST, REDIS_PORT) + imports = ('superset.sql_lab', "superset.tasks", "superset.tasks.thumbnails", ) + result_backend = 'redis://%s:%s/0' % (REDIS_HOST, REDIS_PORT) + worker_prefetch_multiplier = 10 + task_acks_late = True + task_annotations = { 'sql_lab.get_sql_results': { 'rate_limit': '100/s', }, @@ -105,7 +105,7 @@ class CeleryConfig: 'ignore_result': True, }, } - CELERYBEAT_SCHEDULE = { + beat_schedule = { 'reports.scheduler': { 'task': 'reports.scheduler', 'schedule': crontab(minute='*', hour='*'), diff --git a/docs/docs/installation/async-queries-celery.mdx b/docs/docs/installation/async-queries-celery.mdx index 7c19a9405cf09..b742d8c6b52c2 100644 --- a/docs/docs/installation/async-queries-celery.mdx +++ b/docs/docs/installation/async-queries-celery.mdx @@ -23,16 +23,16 @@ and web server processes should have the same configuration. ```python class CeleryConfig(object): - BROKER_URL = 'redis://localhost:6379/0' - CELERY_IMPORTS = ( + broker_url = 'redis://localhost:6379/0' + imports = ( 'superset.sql_lab', 'superset.tasks', ) - CELERY_RESULT_BACKEND = 'redis://localhost:6379/0' - CELERYD_LOG_LEVEL = 'DEBUG' - CELERYD_PREFETCH_MULTIPLIER = 10 - CELERY_ACKS_LATE = True - CELERY_ANNOTATIONS = { + result_backend = 'redis://localhost:6379/0' + worker_log_level = 'DEBUG' + worker_prefetch_multiplier = 10 + task_acks_late = True + task_annotations = { 'sql_lab.get_sql_results': { 'rate_limit': '100/s', }, @@ -43,7 +43,7 @@ class CeleryConfig(object): 'ignore_result': True, }, } - CELERYBEAT_SCHEDULE = { + beat_schedule = { 'email_reports.schedule_hourly': { 'task': 'email_reports.schedule_hourly', 'schedule': crontab(minute=1, hour='*'), diff --git a/docs/docs/installation/cache.mdx b/docs/docs/installation/cache.mdx index e86382b3c16c4..81c98a7907920 100644 --- a/docs/docs/installation/cache.mdx +++ b/docs/docs/installation/cache.mdx @@ -78,11 +78,11 @@ from s3cache.s3cache import S3Cache ... 
class CeleryConfig(object): - BROKER_URL = "redis://localhost:6379/0" - CELERY_IMPORTS = ("superset.sql_lab", "superset.tasks", "superset.tasks.thumbnails") - CELERY_RESULT_BACKEND = "redis://localhost:6379/0" - CELERYD_PREFETCH_MULTIPLIER = 10 - CELERY_ACKS_LATE = True + broker_url = "redis://localhost:6379/0" + imports = ("superset.sql_lab", "superset.tasks", "superset.tasks.thumbnails") + result_backend = "redis://localhost:6379/0" + worker_prefetch_multiplier = 10 + task_acks_late = True CELERY_CONFIG = CeleryConfig diff --git a/docs/docs/installation/running-on-kubernetes.mdx b/docs/docs/installation/running-on-kubernetes.mdx index f879f2e6b5092..cde8280c10149 100644 --- a/docs/docs/installation/running-on-kubernetes.mdx +++ b/docs/docs/installation/running-on-kubernetes.mdx @@ -315,12 +315,10 @@ configOverrides: from celery.schedules import crontab class CeleryConfig(object): - BROKER_URL = f"redis://{env('REDIS_HOST')}:{env('REDIS_PORT')}/0" - CELERY_IMPORTS = ('superset.sql_lab', ) - CELERY_RESULT_BACKEND = f"redis://{env('REDIS_HOST')}:{env('REDIS_PORT')}/0" - CELERY_ANNOTATIONS = {'tasks.add': {'rate_limit': '10/s'}} - CELERY_IMPORTS = ('superset.sql_lab', "superset.tasks", "superset.tasks.thumbnails", ) - CELERY_ANNOTATIONS = { + broker_url = f"redis://{env('REDIS_HOST')}:{env('REDIS_PORT')}/0" + imports = ('superset.sql_lab', "superset.tasks", "superset.tasks.thumbnails", ) + result_backend = f"redis://{env('REDIS_HOST')}:{env('REDIS_PORT')}/0" + task_annotations = { 'sql_lab.get_sql_results': { 'rate_limit': '100/s', }, @@ -331,7 +329,7 @@ configOverrides: 'ignore_result': True, }, } - CELERYBEAT_SCHEDULE = { + beat_schedule = { 'reports.scheduler': { 'task': 'reports.scheduler', 'schedule': crontab(minute='*', hour='*'), From bac916f793f9bdbb45bf85f5cfa29c6641fa1b5d Mon Sep 17 00:00:00 2001 From: Daniel Gaspar Date: Fri, 18 Mar 2022 11:14:14 +0000 Subject: [PATCH 15/17] downgrade celery to 5.2.2 --- requirements/base.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements/base.txt b/requirements/base.txt index af23da35a67ad..9c50376f12694 100644 --- a/requirements/base.txt +++ b/requirements/base.txt @@ -33,7 +33,7 @@ brotli==1.0.9 # via flask-compress cachelib==0.4.1 # via apache-superset -celery==5.2.3 +celery==5.2.2 # via apache-superset cffi==1.14.6 # via cryptography From b9807ca3b96cb6abf5f9739eaf2af002739be5bd Mon Sep 17 00:00:00 2001 From: Daniel Gaspar Date: Mon, 21 Mar 2022 11:57:04 +0000 Subject: [PATCH 16/17] ref FAB to final 4.0.0 release --- requirements/base.txt | 2 +- setup.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/requirements/base.txt b/requirements/base.txt index 9c50376f12694..fb16470fd399a 100644 --- a/requirements/base.txt +++ b/requirements/base.txt @@ -84,7 +84,7 @@ flask==2.0.3 # flask-migrate # flask-sqlalchemy # flask-wtf -flask-appbuilder==4.0.0rc3 +flask-appbuilder==4.0.0 # via apache-superset flask-babel==1.0.0 # via flask-appbuilder diff --git a/setup.py b/setup.py index 3f66d20eeb741..977d1b3e7ae63 100644 --- a/setup.py +++ b/setup.py @@ -78,7 +78,7 @@ def get_git_sha() -> str: "cryptography>=3.3.2", "deprecation>=2.1.0, <2.2.0", "flask>=2.0.0, <3.0.0", - "flask-appbuilder==4.0.0rc3", + "flask-appbuilder>=4.0.0, <5.0.0", "flask-caching>=1.10.0", "flask-compress", "flask-talisman", From add9a2bbd1085c2a608b36d1200192cc2e42cb97 Mon Sep 17 00:00:00 2001 From: Daniel Gaspar Date: Wed, 23 Mar 2022 15:49:00 +0000 Subject: [PATCH 17/17] remove conflict left over --- UPDATING.md | 1 - 1 file 
changed, 1 deletion(-) diff --git a/UPDATING.md b/UPDATING.md index dd53d35c0dc4c..22674e49a3f2e 100644 --- a/UPDATING.md +++ b/UPDATING.md @@ -35,7 +35,6 @@ assists people when migrating to a new version. - [19049](https://github.com/apache/superset/pull/19049): APP_ICON_WIDTH has been removed from the config. Superset should now be able to handle different logo sizes without having to explicitly set an APP_ICON_WIDTH. This might affect the size of existing custom logos as the UI will now resize them according to the specified space of maximum 148px and not according to the value of APP_ICON_WIDTH. - [19274](https://github.com/apache/superset/pull/19274): The `PUBLIC_ROLE_LIKE_GAMMA` config key has been removed, set `PUBLIC_ROLE_LIKE` = "Gamma" to have the same functionality. - [19273](https://github.com/apache/superset/pull/19273): The `SUPERSET_CELERY_WORKERS` and `SUPERSET_WORKERS` config keys has been removed. Configure celery directly using `CELERY_CONFIG` on Superset ->>>>>>> master - [19231](https://github.com/apache/superset/pull/19231): The `ENABLE_REACT_CRUD_VIEWS` feature flag has been removed (permanently enabled). Any deployments which had set this flag to false will need to verify that the React views support their use case. - [17556](https://github.com/apache/superset/pull/17556): Bumps mysqlclient from v1 to v2 - [19113](https://github.com/apache/superset/pull/19113): The `ENABLE_JAVASCRIPT_CONTROLS` setting has moved from app config to a feature flag. Any deployments who overrode this setting will now need to override the feature flag from here onward.
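
For reference, a minimal sketch of the PyJWT 1.x -> 2.x behaviour change that patches 02 and 03 adapt to. The secret and payload below are illustrative placeholders, not values taken from Superset:

```python
import jwt

secret = "example-secret"  # placeholder, not a real Superset secret
payload = {"channel": "example-channel", "sub": "1"}

# PyJWT 2.x returns a str from encode(); 1.x returned bytes, which is why
# the removed code called .decode("utf-8") on the result.
token = jwt.encode(payload, secret, algorithm="HS256")
assert isinstance(token, str)

# PyJWT 2.x also requires an explicit algorithms allow-list on decode(),
# matching the algorithms=["HS256"] argument added in the security API test.
data = jwt.decode(token, secret, algorithms=["HS256"])
assert data["channel"] == "example-channel"
```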
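
Likewise, a hedged sketch of the Flask 2.0 `send_file` keyword rename that patch 04 applies and patch 08 reverts; the route and payload are invented for illustration, and Flask 2.0 still accepts the old `attachment_filename` spelling as a deprecated alias:

```python
from io import BytesIO

from flask import Flask, send_file

app = Flask(__name__)


@app.route("/export")
def export():
    buf = BytesIO(b"zip bytes would go here")  # illustrative payload
    return send_file(
        buf,
        mimetype="application/zip",
        as_attachment=True,
        # Flask >= 2.0 spelling; the 1.x name was attachment_filename,
        # which patch 08 falls back to because of the mypy stub mismatch.
        download_name="dashboard_export.zip",
    )
```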
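
Finally, a sketch of the Celery 4 -> 5 settings rename applied to `superset/config.py` and the docs, following the old-settings map linked from UPDATING.md; the broker and result-backend URLs are placeholders:

```python
class CeleryConfigV4:  # pre-patch style: uppercase Celery 4 setting names
    BROKER_URL = "redis://localhost:6379/0"
    CELERY_IMPORTS = ("superset.sql_lab",)
    CELERY_RESULT_BACKEND = "redis://localhost:6379/0"
    CELERYD_PREFETCH_MULTIPLIER = 1
    CELERY_ACKS_LATE = False


class CeleryConfigV5:  # post-patch style: lowercase Celery 5 setting names
    broker_url = "redis://localhost:6379/0"
    imports = ("superset.sql_lab",)
    result_backend = "redis://localhost:6379/0"
    worker_prefetch_multiplier = 1
    task_acks_late = False
```

The command-line invocation flips as well: Celery 5 expects global options before the sub-command (for example `celery --app=superset.tasks.celery_app:app worker`) rather than the Celery 4 form `celery worker --app=...`, per the upgrade note linked above.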