From 91ee895f9e7a81388db6a26bb29e4823c2f75532 Mon Sep 17 00:00:00 2001 From: Jack Fragassi Date: Thu, 27 Apr 2023 09:30:12 -0700 Subject: [PATCH 01/31] First draft --- superset/cachekeys/api.py | 90 +++++++++++++++ superset/cachekeys/commands/__init__.py | 16 +++ superset/cachekeys/commands/exceptions.py | 44 +++++++ superset/cachekeys/commands/warm_up_cache.py | 114 +++++++++++++++++++ superset/tasks/cache.py | 2 + superset/views/core.py | 1 + tests/integration_tests/core_tests.py | 1 + tests/integration_tests/strategy_tests.py | 1 + 8 files changed, 269 insertions(+) create mode 100644 superset/cachekeys/commands/__init__.py create mode 100644 superset/cachekeys/commands/exceptions.py create mode 100644 superset/cachekeys/commands/warm_up_cache.py diff --git a/superset/cachekeys/api.py b/superset/cachekeys/api.py index 78e680d524c57..5874e28eba2df 100644 --- a/superset/cachekeys/api.py +++ b/superset/cachekeys/api.py @@ -24,7 +24,9 @@ from marshmallow.exceptions import ValidationError from sqlalchemy.exc import SQLAlchemyError +from superset.cachekeys.commands.warm_up_cache import WarmUpCacheCommand from superset.cachekeys.schemas import CacheInvalidationRequestSchema +from superset.commands.exceptions import CommandException from superset.connectors.sqla.models import SqlaTable from superset.extensions import cache_manager, db, event_logger, stats_logger_manager from superset.models.cache import CacheKey @@ -40,10 +42,98 @@ class CacheRestApi(BaseSupersetModelRestApi): class_permission_name = "CacheRestApi" include_route_methods = { "invalidate", + "warm_up_cache", } openapi_spec_component_schemas = (CacheInvalidationRequestSchema,) + @expose("/warm_up_cache", methods=["GET"]) + @protect() + @safe + @statsd_metrics + @event_logger.log_this_with_context( + action=lambda self, *args, **kwargs: f"{self.__class__.__name__}" + f".warm_up_cache", + log_to_statsd=False, + ) + def warm_up_cache(self) -> Response: + """Warms up the cache for the slice or table. 
+ + Note for slices a force refresh occurs. + + In terms of the `extra_filters` these can be obtained from records in the JSON + encoded `logs.json` column associated with the `explore_json` action. + + --- + get: + description: >- + Warms up the cache for the slice or table + parameters: + - in: query + name: slice_id + schema: + type: integer + description: The ID of the chart to warm up cache for + - in: query + name: dashboard_id + schema: + type: integer + description: The ID of the dashboard to get filters for when warming cache + - in: query + name: table_name + schema: + type: string + description: The name of the table to warm up cache for + - in: query + name: db_name + schema: + type: string + description: The name of the database where the table is located + - in: query + name: extra_filters + schema: + type: string + description: Extra filters to apply when warming up cache + responses: + 200: + description: Each chart's warmup status + content: + application/json: + schema: + type: object + properties: + result: + type: array + items: + type: object + properties: + slice_id: + type: integer + viz_error: + type: string + viz_status: + type: string + 400: + $ref: '#/components/responses/400' + 404: + $ref: '#/components/responses/404' + 500: + $ref: '#/components/responses/500' + """ + slice_id = request.args.get("slice_id") + dashboard_id = request.args.get("dashboard_id") + table_name = request.args.get("table_name") + db_name = request.args.get("db_name") + extra_filters = request.args.get("extra_filters") + + try: + payload = WarmUpCacheCommand( + slice_id, dashboard_id, table_name, db_name, extra_filters + ).run() + return self.response(200, result=payload) + except CommandException as ex: + return self.response(ex.status, message=ex.message) + @expose("/invalidate", methods=["POST"]) @protect() @safe diff --git a/superset/cachekeys/commands/__init__.py b/superset/cachekeys/commands/__init__.py new file mode 100644 index 
0000000000000..13a83393a9124 --- /dev/null +++ b/superset/cachekeys/commands/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. diff --git a/superset/cachekeys/commands/exceptions.py b/superset/cachekeys/commands/exceptions.py new file mode 100644 index 0000000000000..dc4201f7adf24 --- /dev/null +++ b/superset/cachekeys/commands/exceptions.py @@ -0,0 +1,44 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+ +from flask_babel import lazy_gettext as _ + +from superset.commands.exceptions import CommandException + + +class WarmUpCacheParametersExpectedError(CommandException): + status = 400 + message = _( + "Malformed request. slice_id or table_name " + "and db_name arguments are expected" + ) + + +class WarmUpCacheChartNotFoundError(CommandException): + def __init__(self, chart_id: int): + message = f"Chart {chart_id} not found" + super().__init__(message) + + status = 404 + + +class WarmUpCacheTableNotFoundError(CommandException): + def __init__(self, table_name: str, db_name: str): + message = f"Table {table_name} wasn't found in the database {db_name}" + super().__init__(message) + + status = 404 diff --git a/superset/cachekeys/commands/warm_up_cache.py b/superset/cachekeys/commands/warm_up_cache.py new file mode 100644 index 0000000000000..2fb45d0bad83b --- /dev/null +++ b/superset/cachekeys/commands/warm_up_cache.py @@ -0,0 +1,114 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+ + +from typing import Any, Dict, List, Optional + +from flask import g +import simplejson as json + +from superset.cachekeys.commands.exceptions import ( + WarmUpCacheChartNotFoundError, + WarmUpCacheParametersExpectedError, + WarmUpCacheTableNotFoundError, +) +from superset.commands.base import BaseCommand +from superset.connectors.sqla.models import SqlaTable +from superset.extensions import db +from superset.models.core import Database +from superset.models.slice import Slice +from superset.utils.core import error_msg_from_exception +from superset.views.utils import get_dashboard_extra_filters, get_form_data, get_viz + + +class WarmUpCacheCommand(BaseCommand): + def __init__( + self, + slice_id: Optional[int], + dashboard_id: Optional[int], + table_name: Optional[str], + db_name: Optional[str], + extra_filters: Optional[str], + ): + self._slice_id = slice_id + self._dashboard_id = dashboard_id + self._table_name = table_name + self._db_name = db_name + self._extra_filters = extra_filters + self._slices: List[Slice] = [] + + def run(self) -> List[Dict[str, Any]]: + result: List[Dict[str, Any]] = [] + for slc in self._slices: + try: + form_data = get_form_data(slc.id, use_slice_data=True)[0] + if self._dashboard_id: + form_data["extra_filters"] = ( + json.loads(self._extra_filters) + if self._extra_filters + else get_dashboard_extra_filters(slc.id, self._dashboard_id) + ) + + if not slc.datasource: + raise Exception("Slice's datasource does not exist") + + obj = get_viz( + datasource_type=slc.datasource.type, + datasource_id=slc.datasource.id, + form_data=form_data, + force=True, + ) + + # pylint: disable=assigning-non-slot + g.form_data = form_data + payload = obj.get_payload() + delattr(g, "form_data") + error = payload["errors"] or None + status = payload["status"] + except Exception as ex: # pylint: disable=broad-except + error = error_msg_from_exception(ex) + status = None + + result.append( + {"slice_id": slc.id, "viz_error": error, "viz_status": status} + 
) + + return result + + def validate(self) -> None: + if not self._slice_id and not (self._table_name and self._db_name): + raise WarmUpCacheParametersExpectedError() + if self._slice_id: + self._slices = db.session.query(Slice).filter_by(id=self._slice_id).all() + if not self._slices: + raise WarmUpCacheChartNotFoundError(self._slice_id) + elif self._table_name and self._db_name: + table = ( + db.session.query(SqlaTable) + .join(Database) + .filter( + Database.database_name == self._db_name, + SqlaTable.table_name == self._table_name, + ) + ).one_or_none() + if not table: + raise WarmUpCacheTableNotFoundError(self._table_name, self._db_name) + self._slices = ( + db.session.query(Slice) + .filter_by(datasource_id=table.id, datasource_type=table.type) + .all() + ) diff --git a/superset/tasks/cache.py b/superset/tasks/cache.py index bdbf8add7eaba..d7d870c988bd7 100644 --- a/superset/tasks/cache.py +++ b/superset/tasks/cache.py @@ -40,6 +40,7 @@ def get_url(chart: Slice, dashboard: Optional[Dashboard] = None) -> str: """Return external URL for warming up a given chart/table cache.""" with app.test_request_context(): baseurl = "{WEBDRIVER_BASEURL}".format(**app.config) + #todome update with api v1 endpoint url = f"{baseurl}superset/warm_up_cache/?slice_id={chart.id}" if dashboard: url += f"&dashboard_id={dashboard.id}" @@ -51,6 +52,7 @@ class Strategy: # pylint: disable=too-few-public-methods A cache warm up strategy. Each strategy defines a `get_urls` method that returns a list of URLs to + # todome update with api v1 endpoint be fetched from the `/superset/warm_up_cache/` endpoint. 
Strategies can be configured in `superset/config.py`: diff --git a/superset/views/core.py b/superset/views/core.py index 774c045992c55..e8d841d9a33f5 100755 --- a/superset/views/core.py +++ b/superset/views/core.py @@ -1697,6 +1697,7 @@ def fave_slices(self, user_id: Optional[int] = None) -> FlaskResponse: @api @has_access_api @expose("/warm_up_cache/", methods=["GET"]) + @deprecated(new_target="api/v1/cachekey/warm_up_cache") def warm_up_cache( # pylint: disable=too-many-locals,no-self-use self, ) -> FlaskResponse: diff --git a/tests/integration_tests/core_tests.py b/tests/integration_tests/core_tests.py index f036f18bf6aa3..b67af9028c5b9 100644 --- a/tests/integration_tests/core_tests.py +++ b/tests/integration_tests/core_tests.py @@ -580,6 +580,7 @@ def test_databaseview_edit(self, username="admin"): "load_energy_table_with_slice", "load_birth_names_dashboard_with_slices" ) def test_warm_up_cache(self): + # todome: port this test over to api v1 endpoint self.login() slc = self.get_slice("Girls", db.session) data = self.get_json_resp("/superset/warm_up_cache?slice_id={}".format(slc.id)) diff --git a/tests/integration_tests/strategy_tests.py b/tests/integration_tests/strategy_tests.py index e54ae865e3c15..043259f4bce68 100644 --- a/tests/integration_tests/strategy_tests.py +++ b/tests/integration_tests/strategy_tests.py @@ -79,6 +79,7 @@ def test_top_n_dashboards_strategy(self): strategy = TopNDashboardsStrategy(1) result = sorted(strategy.get_urls()) + # todome: update tests in this file w/ api v1 endpoint expected = sorted( [ f"{get_url_host()}superset/warm_up_cache/?slice_id={slc.id}&dashboard_id={dash.id}" From 51725d54fc155fc3746aeb3a40712fa54377d6eb Mon Sep 17 00:00:00 2001 From: Jack Fragassi Date: Thu, 27 Apr 2023 09:58:52 -0700 Subject: [PATCH 02/31] Cleanup refs to old endpoint --- superset/tasks/cache.py | 6 +-- .../integration_tests/cachekeys/api_tests.py | 39 +++++++++++++++++++ tests/integration_tests/core_tests.py | 1 - 
tests/integration_tests/strategy_tests.py | 7 ++-- 4 files changed, 44 insertions(+), 9 deletions(-) diff --git a/superset/tasks/cache.py b/superset/tasks/cache.py index d7d870c988bd7..5e93503e1783a 100644 --- a/superset/tasks/cache.py +++ b/superset/tasks/cache.py @@ -40,8 +40,7 @@ def get_url(chart: Slice, dashboard: Optional[Dashboard] = None) -> str: """Return external URL for warming up a given chart/table cache.""" with app.test_request_context(): baseurl = "{WEBDRIVER_BASEURL}".format(**app.config) - #todome update with api v1 endpoint - url = f"{baseurl}superset/warm_up_cache/?slice_id={chart.id}" + url = f"{baseurl}api/v1/cachekey/warm_up_cache/?slice_id={chart.id}" if dashboard: url += f"&dashboard_id={dashboard.id}" return url @@ -52,8 +51,7 @@ class Strategy: # pylint: disable=too-few-public-methods A cache warm up strategy. Each strategy defines a `get_urls` method that returns a list of URLs to - # todome update with api v1 endpoint - be fetched from the `/superset/warm_up_cache/` endpoint. + be fetched from the `/api/v1/cachekey/warm_up_cache` endpoint. Strategies can be configured in `superset/config.py`: diff --git a/tests/integration_tests/cachekeys/api_tests.py b/tests/integration_tests/cachekeys/api_tests.py index d3552bfc8df26..6a9a363aabb13 100644 --- a/tests/integration_tests/cachekeys/api_tests.py +++ b/tests/integration_tests/cachekeys/api_tests.py @@ -16,7 +16,9 @@ # under the License. 
# isort:skip_file """Unit tests for Superset""" +import json from typing import Dict, Any +from urllib.parse import quote import pytest @@ -27,6 +29,14 @@ SupersetTestCase, post_assert_metric, ) +from tests.integration_tests.fixtures.birth_names_dashboard import ( + load_birth_names_dashboard_with_slices, + load_birth_names_data, +) +from tests.integration_tests.fixtures.energy_dashboard import ( + load_energy_table_with_slice, + load_energy_table_data, +) @pytest.fixture @@ -165,3 +175,32 @@ def test_invalidate_existing_caches(invalidate): .datasource_uid == "X__table" ) + + +class TestWarmUpCache(SupersetTestCase): + @pytest.mark.usefixtures( + "load_energy_table_with_slice", "load_birth_names_dashboard_with_slices" + ) + def test_warm_up_cache(self): + self.login() + slc = self.get_slice("Girls", db.session) + data = self.get_json_resp("/api/v1/cachekey/warm_up_cache?slice_id={}".format(slc.id)) + self.assertEqual( + data, [{"slice_id": slc.id, "viz_error": None, "viz_status": "success"}] + ) + + data = self.get_json_resp( + "/api/v1/cachekey/warm_up_cache?table_name=energy_usage&db_name=main" + ) + assert len(data) > 0 + + dashboard = self.get_dash_by_slug("births") + + assert self.get_json_resp( + f"/api/v1/cachekey/warm_up_cache?dashboard_id={dashboard.id}&slice_id={slc.id}" + ) == [{"slice_id": slc.id, "viz_error": None, "viz_status": "success"}] + + assert self.get_json_resp( + f"/api/v1/cachekey/warm_up_cache?dashboard_id={dashboard.id}&slice_id={slc.id}&extra_filters=" + + quote(json.dumps([{"col": "name", "op": "in", "val": ["Jennifer"]}])) + ) == [{"slice_id": slc.id, "viz_error": None, "viz_status": "success"}] diff --git a/tests/integration_tests/core_tests.py b/tests/integration_tests/core_tests.py index b67af9028c5b9..f036f18bf6aa3 100644 --- a/tests/integration_tests/core_tests.py +++ b/tests/integration_tests/core_tests.py @@ -580,7 +580,6 @@ def test_databaseview_edit(self, username="admin"): "load_energy_table_with_slice", 
"load_birth_names_dashboard_with_slices" ) def test_warm_up_cache(self): - # todome: port this test over to api v1 endpoint self.login() slc = self.get_slice("Girls", db.session) data = self.get_json_resp("/superset/warm_up_cache?slice_id={}".format(slc.id)) diff --git a/tests/integration_tests/strategy_tests.py b/tests/integration_tests/strategy_tests.py index 043259f4bce68..68afcb8882238 100644 --- a/tests/integration_tests/strategy_tests.py +++ b/tests/integration_tests/strategy_tests.py @@ -79,10 +79,9 @@ def test_top_n_dashboards_strategy(self): strategy = TopNDashboardsStrategy(1) result = sorted(strategy.get_urls()) - # todome: update tests in this file w/ api v1 endpoint expected = sorted( [ - f"{get_url_host()}superset/warm_up_cache/?slice_id={slc.id}&dashboard_id={dash.id}" + f"{get_url_host()}api/v1/cachekey/warm_up_cache/?slice_id={slc.id}&dashboard_id={dash.id}" for slc in dash.slices ] ) @@ -113,7 +112,7 @@ def test_dashboard_tags(self): dash = self.get_dash_by_slug("births") tag1_urls = sorted( [ - f"{get_url_host()}superset/warm_up_cache/?slice_id={slc.id}" + f"{get_url_host()}api/v1/cachekey/warm_up_cache/?slice_id={slc.id}" for slc in dash.slices ] ) @@ -136,7 +135,7 @@ def test_dashboard_tags(self): # tag first slice dash = self.get_dash_by_slug("unicode-test") slc = dash.slices[0] - tag2_urls = [f"{get_url_host()}superset/warm_up_cache/?slice_id={slc.id}"] + tag2_urls = [f"{get_url_host()}api/v1/cachekey/warm_up_cache/?slice_id={slc.id}"] object_id = slc.id tagged_object = TaggedObject( tag_id=tag2.id, object_id=object_id, object_type=ObjectTypes.chart From ea265bf077b345b3ded2cc51b948117cf7dc7a70 Mon Sep 17 00:00:00 2001 From: Jack Fragassi Date: Thu, 27 Apr 2023 13:31:07 -0700 Subject: [PATCH 03/31] Fix test --- superset/cachekeys/commands/warm_up_cache.py | 1 + tests/integration_tests/cachekeys/api_tests.py | 10 ++++++---- 2 files changed, 7 insertions(+), 4 deletions(-) diff --git a/superset/cachekeys/commands/warm_up_cache.py 
b/superset/cachekeys/commands/warm_up_cache.py index 2fb45d0bad83b..9dad940ee128d 100644 --- a/superset/cachekeys/commands/warm_up_cache.py +++ b/superset/cachekeys/commands/warm_up_cache.py @@ -52,6 +52,7 @@ def __init__( self._slices: List[Slice] = [] def run(self) -> List[Dict[str, Any]]: + self.validate() result: List[Dict[str, Any]] = [] for slc in self._slices: try: diff --git a/tests/integration_tests/cachekeys/api_tests.py b/tests/integration_tests/cachekeys/api_tests.py index 6a9a363aabb13..f826129deb589 100644 --- a/tests/integration_tests/cachekeys/api_tests.py +++ b/tests/integration_tests/cachekeys/api_tests.py @@ -25,6 +25,7 @@ from superset.extensions import cache_manager, db from superset.models.cache import CacheKey from superset.utils.core import get_example_default_schema +from superset.utils.database import get_example_database from tests.integration_tests.base_tests import ( SupersetTestCase, post_assert_metric, @@ -186,11 +187,12 @@ def test_warm_up_cache(self): slc = self.get_slice("Girls", db.session) data = self.get_json_resp("/api/v1/cachekey/warm_up_cache?slice_id={}".format(slc.id)) self.assertEqual( - data, [{"slice_id": slc.id, "viz_error": None, "viz_status": "success"}] + data["result"], [{"slice_id": slc.id, "viz_error": None, "viz_status": "success"}] ) data = self.get_json_resp( - "/api/v1/cachekey/warm_up_cache?table_name=energy_usage&db_name=main" + "/api/v1/cachekey/warm_up_cache?table_name=energy_usage" + f"&db_name={get_example_database().database_name}" ) assert len(data) > 0 @@ -198,9 +200,9 @@ def test_warm_up_cache(self): assert self.get_json_resp( f"/api/v1/cachekey/warm_up_cache?dashboard_id={dashboard.id}&slice_id={slc.id}" - ) == [{"slice_id": slc.id, "viz_error": None, "viz_status": "success"}] + )["result"] == [{"slice_id": slc.id, "viz_error": None, "viz_status": "success"}] assert self.get_json_resp( f"/api/v1/cachekey/warm_up_cache?dashboard_id={dashboard.id}&slice_id={slc.id}&extra_filters=" + 
quote(json.dumps([{"col": "name", "op": "in", "val": ["Jennifer"]}])) - ) == [{"slice_id": slc.id, "viz_error": None, "viz_status": "success"}] + )["result"] == [{"slice_id": slc.id, "viz_error": None, "viz_status": "success"}] From db02b9cb8b9b2f882fab8916a533df2476ea7e1c Mon Sep 17 00:00:00 2001 From: Jack Fragassi Date: Thu, 27 Apr 2023 13:48:36 -0700 Subject: [PATCH 04/31] Fix lint --- superset/cachekeys/commands/warm_up_cache.py | 1 + 1 file changed, 1 insertion(+) diff --git a/superset/cachekeys/commands/warm_up_cache.py b/superset/cachekeys/commands/warm_up_cache.py index 9dad940ee128d..0a7b61c677d68 100644 --- a/superset/cachekeys/commands/warm_up_cache.py +++ b/superset/cachekeys/commands/warm_up_cache.py @@ -36,6 +36,7 @@ class WarmUpCacheCommand(BaseCommand): + # pylint: disable=too-many-arguments def __init__( self, slice_id: Optional[int], From 698e2d633a8c65320107eec52de95983c8b9b974 Mon Sep 17 00:00:00 2001 From: Jack Fragassi Date: Thu, 27 Apr 2023 17:15:14 -0700 Subject: [PATCH 05/31] Fix formatting --- tests/integration_tests/cachekeys/api_tests.py | 15 +++++++++++---- tests/integration_tests/strategy_tests.py | 4 +++- 2 files changed, 14 insertions(+), 5 deletions(-) diff --git a/tests/integration_tests/cachekeys/api_tests.py b/tests/integration_tests/cachekeys/api_tests.py index f826129deb589..4c1dfd819c944 100644 --- a/tests/integration_tests/cachekeys/api_tests.py +++ b/tests/integration_tests/cachekeys/api_tests.py @@ -185,9 +185,12 @@ class TestWarmUpCache(SupersetTestCase): def test_warm_up_cache(self): self.login() slc = self.get_slice("Girls", db.session) - data = self.get_json_resp("/api/v1/cachekey/warm_up_cache?slice_id={}".format(slc.id)) + data = self.get_json_resp( + "/api/v1/cachekey/warm_up_cache?slice_id={}".format(slc.id) + ) self.assertEqual( - data["result"], [{"slice_id": slc.id, "viz_error": None, "viz_status": "success"}] + data["result"], + [{"slice_id": slc.id, "viz_error": None, "viz_status": "success"}], ) data = 
self.get_json_resp( @@ -200,9 +203,13 @@ def test_warm_up_cache(self): assert self.get_json_resp( f"/api/v1/cachekey/warm_up_cache?dashboard_id={dashboard.id}&slice_id={slc.id}" - )["result"] == [{"slice_id": slc.id, "viz_error": None, "viz_status": "success"}] + )["result"] == [ + {"slice_id": slc.id, "viz_error": None, "viz_status": "success"} + ] assert self.get_json_resp( f"/api/v1/cachekey/warm_up_cache?dashboard_id={dashboard.id}&slice_id={slc.id}&extra_filters=" + quote(json.dumps([{"col": "name", "op": "in", "val": ["Jennifer"]}])) - )["result"] == [{"slice_id": slc.id, "viz_error": None, "viz_status": "success"}] + )["result"] == [ + {"slice_id": slc.id, "viz_error": None, "viz_status": "success"} + ] diff --git a/tests/integration_tests/strategy_tests.py b/tests/integration_tests/strategy_tests.py index 68afcb8882238..db5ed3bb2fb02 100644 --- a/tests/integration_tests/strategy_tests.py +++ b/tests/integration_tests/strategy_tests.py @@ -135,7 +135,9 @@ def test_dashboard_tags(self): # tag first slice dash = self.get_dash_by_slug("unicode-test") slc = dash.slices[0] - tag2_urls = [f"{get_url_host()}api/v1/cachekey/warm_up_cache/?slice_id={slc.id}"] + tag2_urls = [ + f"{get_url_host()}api/v1/cachekey/warm_up_cache/?slice_id={slc.id}" + ] object_id = slc.id tagged_object = TaggedObject( tag_id=tag2.id, object_id=object_id, object_type=ObjectTypes.chart From 5bf548f79ab4b807565a6c4244b85bf305c95a58 Mon Sep 17 00:00:00 2001 From: Jack Fragassi Date: Fri, 28 Apr 2023 09:32:26 -0700 Subject: [PATCH 06/31] Run isort --- superset/cachekeys/commands/warm_up_cache.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/superset/cachekeys/commands/warm_up_cache.py b/superset/cachekeys/commands/warm_up_cache.py index 0a7b61c677d68..a421bfd546b0d 100644 --- a/superset/cachekeys/commands/warm_up_cache.py +++ b/superset/cachekeys/commands/warm_up_cache.py @@ -18,8 +18,8 @@ from typing import Any, Dict, List, Optional -from flask import g import simplejson as 
json +from flask import g from superset.cachekeys.commands.exceptions import ( WarmUpCacheChartNotFoundError, From 778abd808ed98250c5d8105aaebe506e4c8bb277 Mon Sep 17 00:00:00 2001 From: Jack Fragassi Date: Fri, 5 May 2023 08:36:20 -0700 Subject: [PATCH 07/31] slice -> chart --- superset/cachekeys/api.py | 8 ++--- superset/cachekeys/commands/warm_up_cache.py | 32 +++++++++---------- .../integration_tests/cachekeys/api_tests.py | 12 +++---- 3 files changed, 26 insertions(+), 26 deletions(-) diff --git a/superset/cachekeys/api.py b/superset/cachekeys/api.py index 5874e28eba2df..ab519d7c9fc48 100644 --- a/superset/cachekeys/api.py +++ b/superset/cachekeys/api.py @@ -70,7 +70,7 @@ def warm_up_cache(self) -> Response: Warms up the cache for the slice or table parameters: - in: query - name: slice_id + name: chart_id schema: type: integer description: The ID of the chart to warm up cache for @@ -107,7 +107,7 @@ def warm_up_cache(self) -> Response: items: type: object properties: - slice_id: + chart_id: type: integer viz_error: type: string @@ -120,7 +120,7 @@ def warm_up_cache(self) -> Response: 500: $ref: '#/components/responses/500' """ - slice_id = request.args.get("slice_id") + chart_id = request.args.get("chart_id") dashboard_id = request.args.get("dashboard_id") table_name = request.args.get("table_name") db_name = request.args.get("db_name") @@ -128,7 +128,7 @@ def warm_up_cache(self) -> Response: try: payload = WarmUpCacheCommand( - slice_id, dashboard_id, table_name, db_name, extra_filters + chart_id, dashboard_id, table_name, db_name, extra_filters ).run() return self.response(200, result=payload) except CommandException as ex: diff --git a/superset/cachekeys/commands/warm_up_cache.py b/superset/cachekeys/commands/warm_up_cache.py index a421bfd546b0d..2e608507bd35f 100644 --- a/superset/cachekeys/commands/warm_up_cache.py +++ b/superset/cachekeys/commands/warm_up_cache.py @@ -39,38 +39,38 @@ class WarmUpCacheCommand(BaseCommand): # pylint: 
disable=too-many-arguments def __init__( self, - slice_id: Optional[int], + chart_id: Optional[int], dashboard_id: Optional[int], table_name: Optional[str], db_name: Optional[str], extra_filters: Optional[str], ): - self._slice_id = slice_id + self._chart_id = chart_id self._dashboard_id = dashboard_id self._table_name = table_name self._db_name = db_name self._extra_filters = extra_filters - self._slices: List[Slice] = [] + self._charts: List[Slice] = [] def run(self) -> List[Dict[str, Any]]: self.validate() result: List[Dict[str, Any]] = [] - for slc in self._slices: + for chart in self._charts: try: - form_data = get_form_data(slc.id, use_slice_data=True)[0] + form_data = get_form_data(chart.id, use_slice_data=True)[0] if self._dashboard_id: form_data["extra_filters"] = ( json.loads(self._extra_filters) if self._extra_filters - else get_dashboard_extra_filters(slc.id, self._dashboard_id) + else get_dashboard_extra_filters(chart.id, self._dashboard_id) ) - if not slc.datasource: + if not chart.datasource: raise Exception("Slice's datasource does not exist") obj = get_viz( - datasource_type=slc.datasource.type, - datasource_id=slc.datasource.id, + datasource_type=chart.datasource.type, + datasource_id=chart.datasource.id, form_data=form_data, force=True, ) @@ -86,18 +86,18 @@ def run(self) -> List[Dict[str, Any]]: status = None result.append( - {"slice_id": slc.id, "viz_error": error, "viz_status": status} + {"chart_id": chart.id, "viz_error": error, "viz_status": status} ) return result def validate(self) -> None: - if not self._slice_id and not (self._table_name and self._db_name): + if not self._chart_id and not (self._table_name and self._db_name): raise WarmUpCacheParametersExpectedError() - if self._slice_id: - self._slices = db.session.query(Slice).filter_by(id=self._slice_id).all() - if not self._slices: - raise WarmUpCacheChartNotFoundError(self._slice_id) + if self._chart_id: + self._charts = db.session.query(Slice).filter_by(id=self._chart_id).all() + 
if not self._charts: + raise WarmUpCacheChartNotFoundError(self._chart_id) elif self._table_name and self._db_name: table = ( db.session.query(SqlaTable) @@ -109,7 +109,7 @@ def validate(self) -> None: ).one_or_none() if not table: raise WarmUpCacheTableNotFoundError(self._table_name, self._db_name) - self._slices = ( + self._charts = ( db.session.query(Slice) .filter_by(datasource_id=table.id, datasource_type=table.type) .all() diff --git a/tests/integration_tests/cachekeys/api_tests.py b/tests/integration_tests/cachekeys/api_tests.py index 4c1dfd819c944..10ccd54bc6b31 100644 --- a/tests/integration_tests/cachekeys/api_tests.py +++ b/tests/integration_tests/cachekeys/api_tests.py @@ -186,11 +186,11 @@ def test_warm_up_cache(self): self.login() slc = self.get_slice("Girls", db.session) data = self.get_json_resp( - "/api/v1/cachekey/warm_up_cache?slice_id={}".format(slc.id) + "/api/v1/cachekey/warm_up_cache?chart_id={}".format(slc.id) ) self.assertEqual( data["result"], - [{"slice_id": slc.id, "viz_error": None, "viz_status": "success"}], + [{"chart_id": slc.id, "viz_error": None, "viz_status": "success"}], ) data = self.get_json_resp( @@ -202,14 +202,14 @@ def test_warm_up_cache(self): dashboard = self.get_dash_by_slug("births") assert self.get_json_resp( - f"/api/v1/cachekey/warm_up_cache?dashboard_id={dashboard.id}&slice_id={slc.id}" + f"/api/v1/cachekey/warm_up_cache?dashboard_id={dashboard.id}&chart_id={slc.id}" )["result"] == [ - {"slice_id": slc.id, "viz_error": None, "viz_status": "success"} + {"chart_id": slc.id, "viz_error": None, "viz_status": "success"} ] assert self.get_json_resp( - f"/api/v1/cachekey/warm_up_cache?dashboard_id={dashboard.id}&slice_id={slc.id}&extra_filters=" + f"/api/v1/cachekey/warm_up_cache?dashboard_id={dashboard.id}&chart_id={slc.id}&extra_filters=" + quote(json.dumps([{"col": "name", "op": "in", "val": ["Jennifer"]}])) )["result"] == [ - {"slice_id": slc.id, "viz_error": None, "viz_status": "success"} + {"chart_id": slc.id, 
"viz_error": None, "viz_status": "success"} ] From 3aa8ac9cb44fd3a62657f8f5e9c331e2f67ade9a Mon Sep 17 00:00:00 2001 From: Jack Fragassi Date: Fri, 5 May 2023 08:50:43 -0700 Subject: [PATCH 08/31] fix desc and summary --- superset/cachekeys/api.py | 15 +++++++-------- 1 file changed, 7 insertions(+), 8 deletions(-) diff --git a/superset/cachekeys/api.py b/superset/cachekeys/api.py index ab519d7c9fc48..7053455da4799 100644 --- a/superset/cachekeys/api.py +++ b/superset/cachekeys/api.py @@ -57,17 +57,16 @@ class CacheRestApi(BaseSupersetModelRestApi): log_to_statsd=False, ) def warm_up_cache(self) -> Response: - """Warms up the cache for the slice or table. - - Note for slices a force refresh occurs. - - In terms of the `extra_filters` these can be obtained from records in the JSON - encoded `logs.json` column associated with the `explore_json` action. - + """ --- get: - description: >- + summary: >- Warms up the cache for the slice or table + description: >- + Warms up the cache for the slice or table. + Note for slices a force refresh occurs. + In terms of the `extra_filters` these can be obtained from records in the JSON + encoded `logs.json` column associated with the `explore_json` action. 
parameters: - in: query name: chart_id From 5b583bc97792c19f2fcb7c91fc9fe88a9de6cbba Mon Sep 17 00:00:00 2001 From: Jack Fragassi Date: Fri, 5 May 2023 08:52:41 -0700 Subject: [PATCH 09/31] No user-provided input in errors --- superset/cachekeys/commands/exceptions.py | 10 ++-------- superset/cachekeys/commands/warm_up_cache.py | 4 ++-- 2 files changed, 4 insertions(+), 10 deletions(-) diff --git a/superset/cachekeys/commands/exceptions.py b/superset/cachekeys/commands/exceptions.py index dc4201f7adf24..84acfb5928299 100644 --- a/superset/cachekeys/commands/exceptions.py +++ b/superset/cachekeys/commands/exceptions.py @@ -29,16 +29,10 @@ class WarmUpCacheParametersExpectedError(CommandException): class WarmUpCacheChartNotFoundError(CommandException): - def __init__(self, chart_id: int): - message = f"Chart {chart_id} not found" - super().__init__(message) - status = 404 + message = _("Chart not found") class WarmUpCacheTableNotFoundError(CommandException): - def __init__(self, table_name: str, db_name: str): - message = f"Table {table_name} wasn't found in the database {db_name}" - super().__init__(message) - status = 404 + message = _("The provided table was not found in the provided database") diff --git a/superset/cachekeys/commands/warm_up_cache.py b/superset/cachekeys/commands/warm_up_cache.py index 2e608507bd35f..6416fffcb4f00 100644 --- a/superset/cachekeys/commands/warm_up_cache.py +++ b/superset/cachekeys/commands/warm_up_cache.py @@ -97,7 +97,7 @@ def validate(self) -> None: if self._chart_id: self._charts = db.session.query(Slice).filter_by(id=self._chart_id).all() if not self._charts: - raise WarmUpCacheChartNotFoundError(self._chart_id) + raise WarmUpCacheChartNotFoundError() elif self._table_name and self._db_name: table = ( db.session.query(SqlaTable) @@ -108,7 +108,7 @@ def validate(self) -> None: ) ).one_or_none() if not table: - raise WarmUpCacheTableNotFoundError(self._table_name, self._db_name) + raise WarmUpCacheTableNotFoundError() 
self._charts = ( db.session.query(Slice) .filter_by(datasource_id=table.id, datasource_type=table.type) From 9cec54f2b0f31a5736e501c33150ad5f58ab55d2 Mon Sep 17 00:00:00 2001 From: Jack Fragassi Date: Fri, 5 May 2023 10:17:58 -0700 Subject: [PATCH 10/31] get -> put and move query params to json payload --- superset/cachekeys/api.py | 70 ++++++------------- superset/cachekeys/schemas.py | 18 +++++ .../integration_tests/cachekeys/api_tests.py | 45 ++++++------ 3 files changed, 62 insertions(+), 71 deletions(-) diff --git a/superset/cachekeys/api.py b/superset/cachekeys/api.py index 7053455da4799..d982cda587d74 100644 --- a/superset/cachekeys/api.py +++ b/superset/cachekeys/api.py @@ -25,7 +25,7 @@ from sqlalchemy.exc import SQLAlchemyError from superset.cachekeys.commands.warm_up_cache import WarmUpCacheCommand -from superset.cachekeys.schemas import CacheInvalidationRequestSchema +from superset.cachekeys.schemas import CacheInvalidationRequestSchema, CacheWarmUpRequestSchema, CacheWarmUpResponseSchema from superset.commands.exceptions import CommandException from superset.connectors.sqla.models import SqlaTable from superset.extensions import cache_manager, db, event_logger, stats_logger_manager @@ -45,9 +45,9 @@ class CacheRestApi(BaseSupersetModelRestApi): "warm_up_cache", } - openapi_spec_component_schemas = (CacheInvalidationRequestSchema,) + openapi_spec_component_schemas = (CacheInvalidationRequestSchema, CacheWarmUpRequestSchema, CacheWarmUpResponseSchema) - @expose("/warm_up_cache", methods=["GET"]) + @expose("/warm_up_cache", methods=["PUT"]) @protect() @safe @statsd_metrics @@ -59,7 +59,7 @@ class CacheRestApi(BaseSupersetModelRestApi): def warm_up_cache(self) -> Response: """ --- - get: + put: summary: >- Warms up the cache for the slice or table description: >- @@ -67,51 +67,23 @@ def warm_up_cache(self) -> Response: Note for slices a force refresh occurs. 
In terms of the `extra_filters` these can be obtained from records in the JSON encoded `logs.json` column associated with the `explore_json` action. - parameters: - - in: query - name: chart_id - schema: - type: integer - description: The ID of the chart to warm up cache for - - in: query - name: dashboard_id - schema: - type: integer - description: The ID of the dashboard to get filters for when warming cache - - in: query - name: table_name - schema: - type: string - description: The name of the table to warm up cache for - - in: query - name: db_name - schema: - type: string - description: The name of the database where the table is located - - in: query - name: extra_filters - schema: - type: string - description: Extra filters to apply when warming up cache + requestBody: + description: >- + Identifies charts to warm up cache for, and any additional dashboard or + filter context to use. Either a chart id or a table name and a database + name can be passed. + required: true + content: + application/json: + schema: + $ref: "#/components/schemas/CacheWarmUpRequestSchema" responses: 200: description: Each chart's warmup status content: application/json: schema: - type: object - properties: - result: - type: array - items: - type: object - properties: - chart_id: - type: integer - viz_error: - type: string - viz_status: - type: string + $ref: "#/components/schemas/CacheWarmUpResponseSchema" 400: $ref: '#/components/responses/400' 404: @@ -119,15 +91,13 @@ def warm_up_cache(self) -> Response: 500: $ref: '#/components/responses/500' """ - chart_id = request.args.get("chart_id") - dashboard_id = request.args.get("dashboard_id") - table_name = request.args.get("table_name") - db_name = request.args.get("db_name") - extra_filters = request.args.get("extra_filters") - + try: + body = CacheWarmUpRequestSchema().load(request.json) + except ValidationError as error: + return self.response_400(message=error.messages) try: payload = WarmUpCacheCommand( - chart_id, 
dashboard_id, table_name, db_name, extra_filters + body.get("chart_id"), body.get("dashboard_id"), body.get("table_name"), body.get("db_name"), body.get("extra_filters") ).run() return self.response(200, result=payload) except CommandException as ex: diff --git a/superset/cachekeys/schemas.py b/superset/cachekeys/schemas.py index e58a45ac565b9..d94e1d2e82a11 100644 --- a/superset/cachekeys/schemas.py +++ b/superset/cachekeys/schemas.py @@ -51,3 +51,21 @@ class CacheInvalidationRequestSchema(Schema): fields.Nested(Datasource), metadata={"description": "A list of the data source and database names"}, ) + + +class CacheWarmUpRequestSchema(Schema): + chart_id = fields.Integer(metadata={"description": "The ID of the chart to warm up cache for"}) + dashboard_id = fields.Integer(metadata={"description": "The ID of the dashboard to get filters for when warming cache"}) + table_name = fields.String(metadata={"description": "The name of the table to warm up cache for"}) + db_name = fields.String(metadata={"description": "The name of the database where the table is located"}) + extra_filters = fields.String(metadata={"description": "Extra filters to apply when warming up cache"}) + + +class CacheWarmUpResponseSingleSchema(Schema): + chart_id = fields.Integer(metadata={"description": "The ID of the chart the status belongs to"}) + viz_error = fields.String(metadata={"description": "Error that occurred when warming cache for chart"}) + viz_status = fields.String(metadata={"description": "Status of the underlying query for the viz"}) + + +class CacheWarmUpResponseSchema(Schema): + result = fields.List(fields.Nested(CacheWarmUpResponseSingleSchema), metadata={"description": "A list of each chart's warmup status and errors if any"}) diff --git a/tests/integration_tests/cachekeys/api_tests.py b/tests/integration_tests/cachekeys/api_tests.py index 10ccd54bc6b31..a9ad3b1c63670 100644 --- a/tests/integration_tests/cachekeys/api_tests.py +++ 
b/tests/integration_tests/cachekeys/api_tests.py @@ -18,7 +18,6 @@ """Unit tests for Superset""" import json from typing import Dict, Any -from urllib.parse import quote import pytest @@ -185,31 +184,35 @@ class TestWarmUpCache(SupersetTestCase): def test_warm_up_cache(self): self.login() slc = self.get_slice("Girls", db.session) - data = self.get_json_resp( - "/api/v1/cachekey/warm_up_cache?chart_id={}".format(slc.id) - ) + rv = self.client.put("/api/v1/cachekey/warm_up_cache", json={"chart_id": slc.id}) + self.assertEqual(rv.status_code, 200) + data = json.loads(rv.data.decode("utf-8")) + self.assertEqual( data["result"], [{"chart_id": slc.id, "viz_error": None, "viz_status": "success"}], ) - data = self.get_json_resp( - "/api/v1/cachekey/warm_up_cache?table_name=energy_usage" - f"&db_name={get_example_database().database_name}" - ) - assert len(data) > 0 + rv = self.client.put("/api/v1/cachekey/warm_up_cache", json={"table_name": "energy_usage", "db_name": get_example_database().database_name}) + self.assertEqual(rv.status_code, 200) + data = json.loads(rv.data.decode("utf-8")) + + assert len(data["result"]) > 0 dashboard = self.get_dash_by_slug("births") - assert self.get_json_resp( - f"/api/v1/cachekey/warm_up_cache?dashboard_id={dashboard.id}&chart_id={slc.id}" - )["result"] == [ - {"chart_id": slc.id, "viz_error": None, "viz_status": "success"} - ] - - assert self.get_json_resp( - f"/api/v1/cachekey/warm_up_cache?dashboard_id={dashboard.id}&chart_id={slc.id}&extra_filters=" - + quote(json.dumps([{"col": "name", "op": "in", "val": ["Jennifer"]}])) - )["result"] == [ - {"chart_id": slc.id, "viz_error": None, "viz_status": "success"} - ] + rv = self.client.put("/api/v1/cachekey/warm_up_cache", json={"chart_id": slc.id, "dashboard_id": dashboard.id}) + self.assertEqual(rv.status_code, 200) + data = json.loads(rv.data.decode("utf-8")) + self.assertEqual( + data["result"], + [{"chart_id": slc.id, "viz_error": None, "viz_status": "success"}], + ) + + rv = 
self.client.put("/api/v1/cachekey/warm_up_cache", json={"chart_id": slc.id, "dashboard_id": dashboard.id, "extra_filters": json.dumps([{"col": "name", "op": "in", "val": ["Jennifer"]}])}) + self.assertEqual(rv.status_code, 200) + data = json.loads(rv.data.decode("utf-8")) + self.assertEqual( + data["result"], + [{"chart_id": slc.id, "viz_error": None, "viz_status": "success"}], + ) From f08d7bdf509f06adef4dd170e9e5ec990e95c6e9 Mon Sep 17 00:00:00 2001 From: Jack Fragassi Date: Fri, 5 May 2023 10:56:05 -0700 Subject: [PATCH 11/31] Add api/commands tests --- .../integration_tests/cachekeys/api_tests.py | 28 ++++++++++ .../cachekeys/commands_tests.py | 52 +++++++++++++++++++ 2 files changed, 80 insertions(+) create mode 100644 tests/integration_tests/cachekeys/commands_tests.py diff --git a/tests/integration_tests/cachekeys/api_tests.py b/tests/integration_tests/cachekeys/api_tests.py index a9ad3b1c63670..84f6036286714 100644 --- a/tests/integration_tests/cachekeys/api_tests.py +++ b/tests/integration_tests/cachekeys/api_tests.py @@ -216,3 +216,31 @@ def test_warm_up_cache(self): data["result"], [{"chart_id": slc.id, "viz_error": None, "viz_status": "success"}], ) + + def test_warm_up_cache_required_params_missing(self): + self.login() + rv = self.client.put("/api/v1/cachekey/warm_up_cache", json={"dashboard_id": 1}) + self.assertEqual(rv.status_code, 400) + data = json.loads(rv.data.decode("utf-8")) + self.assertEqual(data, {"message": "Malformed request. 
slice_id or table_name and db_name arguments are expected"}) + + def test_warm_up_cache_chart_not_found(self): + self.login() + rv = self.client.put("/api/v1/cachekey/warm_up_cache", json={"chart_id": 99999}) + self.assertEqual(rv.status_code, 404) + data = json.loads(rv.data.decode("utf-8")) + self.assertEqual(data, {"message": "Chart not found"}) + + def test_warm_up_cache_table_not_found(self): + self.login() + rv = self.client.put("/api/v1/cachekey/warm_up_cache", json={"table_name": "not_here", "db_name": "abc"}) + self.assertEqual(rv.status_code, 404) + data = json.loads(rv.data.decode("utf-8")) + self.assertEqual(data, {"message": "The provided table was not found in the provided database"}) + + def test_warm_up_cache_payload_validation(self): + self.login() + rv = self.client.put("/api/v1/cachekey/warm_up_cache", json={"chart_id": "id", "table_name": 2, "db_name": 4}) + self.assertEqual(rv.status_code, 400) + data = json.loads(rv.data.decode("utf-8")) + self.assertEqual(data, {"message": {"chart_id": ["Not a valid integer."], "db_name": ["Not a valid string."], "table_name": ["Not a valid string."]}}) diff --git a/tests/integration_tests/cachekeys/commands_tests.py b/tests/integration_tests/cachekeys/commands_tests.py new file mode 100644 index 0000000000000..c40d73b69a76e --- /dev/null +++ b/tests/integration_tests/cachekeys/commands_tests.py @@ -0,0 +1,52 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +import pytest + +from superset.cachekeys.commands.warm_up_cache import WarmUpCacheCommand +from superset.cachekeys.commands.exceptions import WarmUpCacheChartNotFoundError, WarmUpCacheParametersExpectedError, WarmUpCacheTableNotFoundError +from superset.extensions import db +from tests.integration_tests.base_tests import SupersetTestCase +from tests.integration_tests.fixtures.birth_names_dashboard import ( + load_birth_names_dashboard_with_slices, + load_birth_names_data, +) +from tests.integration_tests.fixtures.energy_dashboard import ( + load_energy_table_with_slice, + load_energy_table_data, +) + + +class TestWarmUpCacheCommand(SupersetTestCase): + def test_warm_up_cache_command_required_params_missing(self): + with self.assertRaises(WarmUpCacheParametersExpectedError): + WarmUpCacheCommand(None, 1, None, None, None).run() + + def test_warm_up_cache_command_chart_not_found(self): + with self.assertRaises(WarmUpCacheChartNotFoundError): + WarmUpCacheCommand(99999, None, None, None, None).run() + + def test_warm_up_cache_command_table_not_found(self): + with self.assertRaises(WarmUpCacheTableNotFoundError): + WarmUpCacheCommand(None, None, "not_here", "abc", None).run() + + @pytest.mark.usefixtures( + "load_energy_table_with_slice", "load_birth_names_dashboard_with_slices" + ) + def test_warm_up_cache(self): + slc = self.get_slice("Girls", db.session) + result = WarmUpCacheCommand(slc.id, None, None, None, None).run() + self.assertEqual(result, [{"chart_id": slc.id, "viz_error": None, "viz_status": "success"}]) From 
213a72a58b63068844820d46636104a99182a820 Mon Sep 17 00:00:00 2001 From: Jack Fragassi Date: Fri, 5 May 2023 10:57:22 -0700 Subject: [PATCH 12/31] Formatting --- superset/cachekeys/api.py | 18 +++++- superset/cachekeys/schemas.py | 41 ++++++++++--- .../integration_tests/cachekeys/api_tests.py | 61 ++++++++++++++++--- .../cachekeys/commands_tests.py | 10 ++- 4 files changed, 107 insertions(+), 23 deletions(-) diff --git a/superset/cachekeys/api.py b/superset/cachekeys/api.py index d982cda587d74..6f236c7344356 100644 --- a/superset/cachekeys/api.py +++ b/superset/cachekeys/api.py @@ -25,7 +25,11 @@ from sqlalchemy.exc import SQLAlchemyError from superset.cachekeys.commands.warm_up_cache import WarmUpCacheCommand -from superset.cachekeys.schemas import CacheInvalidationRequestSchema, CacheWarmUpRequestSchema, CacheWarmUpResponseSchema +from superset.cachekeys.schemas import ( + CacheInvalidationRequestSchema, + CacheWarmUpRequestSchema, + CacheWarmUpResponseSchema, +) from superset.commands.exceptions import CommandException from superset.connectors.sqla.models import SqlaTable from superset.extensions import cache_manager, db, event_logger, stats_logger_manager @@ -45,7 +49,11 @@ class CacheRestApi(BaseSupersetModelRestApi): "warm_up_cache", } - openapi_spec_component_schemas = (CacheInvalidationRequestSchema, CacheWarmUpRequestSchema, CacheWarmUpResponseSchema) + openapi_spec_component_schemas = ( + CacheInvalidationRequestSchema, + CacheWarmUpRequestSchema, + CacheWarmUpResponseSchema, + ) @expose("/warm_up_cache", methods=["PUT"]) @protect() @@ -97,7 +105,11 @@ def warm_up_cache(self) -> Response: return self.response_400(message=error.messages) try: payload = WarmUpCacheCommand( - body.get("chart_id"), body.get("dashboard_id"), body.get("table_name"), body.get("db_name"), body.get("extra_filters") + body.get("chart_id"), + body.get("dashboard_id"), + body.get("table_name"), + body.get("db_name"), + body.get("extra_filters"), ).run() return self.response(200, 
result=payload) except CommandException as ex: diff --git a/superset/cachekeys/schemas.py b/superset/cachekeys/schemas.py index d94e1d2e82a11..8682daf3d733d 100644 --- a/superset/cachekeys/schemas.py +++ b/superset/cachekeys/schemas.py @@ -54,18 +54,41 @@ class CacheInvalidationRequestSchema(Schema): class CacheWarmUpRequestSchema(Schema): - chart_id = fields.Integer(metadata={"description": "The ID of the chart to warm up cache for"}) - dashboard_id = fields.Integer(metadata={"description": "The ID of the dashboard to get filters for when warming cache"}) - table_name = fields.String(metadata={"description": "The name of the table to warm up cache for"}) - db_name = fields.String(metadata={"description": "The name of the database where the table is located"}) - extra_filters = fields.String(metadata={"description": "Extra filters to apply when warming up cache"}) + chart_id = fields.Integer( + metadata={"description": "The ID of the chart to warm up cache for"} + ) + dashboard_id = fields.Integer( + metadata={ + "description": "The ID of the dashboard to get filters for when warming cache" + } + ) + table_name = fields.String( + metadata={"description": "The name of the table to warm up cache for"} + ) + db_name = fields.String( + metadata={"description": "The name of the database where the table is located"} + ) + extra_filters = fields.String( + metadata={"description": "Extra filters to apply when warming up cache"} + ) class CacheWarmUpResponseSingleSchema(Schema): - chart_id = fields.Integer(metadata={"description": "The ID of the chart the status belongs to"}) - viz_error = fields.String(metadata={"description": "Error that occurred when warming cache for chart"}) - viz_status = fields.String(metadata={"description": "Status of the underlying query for the viz"}) + chart_id = fields.Integer( + metadata={"description": "The ID of the chart the status belongs to"} + ) + viz_error = fields.String( + metadata={"description": "Error that occurred when warming 
cache for chart"} + ) + viz_status = fields.String( + metadata={"description": "Status of the underlying query for the viz"} + ) class CacheWarmUpResponseSchema(Schema): - result = fields.List(fields.Nested(CacheWarmUpResponseSingleSchema), metadata={"description": "A list of each chart's warmup status and errors if any"}) + result = fields.List( + fields.Nested(CacheWarmUpResponseSingleSchema), + metadata={ + "description": "A list of each chart's warmup status and errors if any" + }, + ) diff --git a/tests/integration_tests/cachekeys/api_tests.py b/tests/integration_tests/cachekeys/api_tests.py index 84f6036286714..1a3bc908901cc 100644 --- a/tests/integration_tests/cachekeys/api_tests.py +++ b/tests/integration_tests/cachekeys/api_tests.py @@ -184,7 +184,9 @@ class TestWarmUpCache(SupersetTestCase): def test_warm_up_cache(self): self.login() slc = self.get_slice("Girls", db.session) - rv = self.client.put("/api/v1/cachekey/warm_up_cache", json={"chart_id": slc.id}) + rv = self.client.put( + "/api/v1/cachekey/warm_up_cache", json={"chart_id": slc.id} + ) self.assertEqual(rv.status_code, 200) data = json.loads(rv.data.decode("utf-8")) @@ -193,7 +195,13 @@ def test_warm_up_cache(self): [{"chart_id": slc.id, "viz_error": None, "viz_status": "success"}], ) - rv = self.client.put("/api/v1/cachekey/warm_up_cache", json={"table_name": "energy_usage", "db_name": get_example_database().database_name}) + rv = self.client.put( + "/api/v1/cachekey/warm_up_cache", + json={ + "table_name": "energy_usage", + "db_name": get_example_database().database_name, + }, + ) self.assertEqual(rv.status_code, 200) data = json.loads(rv.data.decode("utf-8")) @@ -201,7 +209,10 @@ def test_warm_up_cache(self): dashboard = self.get_dash_by_slug("births") - rv = self.client.put("/api/v1/cachekey/warm_up_cache", json={"chart_id": slc.id, "dashboard_id": dashboard.id}) + rv = self.client.put( + "/api/v1/cachekey/warm_up_cache", + json={"chart_id": slc.id, "dashboard_id": dashboard.id}, + ) 
self.assertEqual(rv.status_code, 200) data = json.loads(rv.data.decode("utf-8")) self.assertEqual( @@ -209,7 +220,16 @@ def test_warm_up_cache(self): [{"chart_id": slc.id, "viz_error": None, "viz_status": "success"}], ) - rv = self.client.put("/api/v1/cachekey/warm_up_cache", json={"chart_id": slc.id, "dashboard_id": dashboard.id, "extra_filters": json.dumps([{"col": "name", "op": "in", "val": ["Jennifer"]}])}) + rv = self.client.put( + "/api/v1/cachekey/warm_up_cache", + json={ + "chart_id": slc.id, + "dashboard_id": dashboard.id, + "extra_filters": json.dumps( + [{"col": "name", "op": "in", "val": ["Jennifer"]}] + ), + }, + ) self.assertEqual(rv.status_code, 200) data = json.loads(rv.data.decode("utf-8")) self.assertEqual( @@ -222,7 +242,12 @@ def test_warm_up_cache_required_params_missing(self): rv = self.client.put("/api/v1/cachekey/warm_up_cache", json={"dashboard_id": 1}) self.assertEqual(rv.status_code, 400) data = json.loads(rv.data.decode("utf-8")) - self.assertEqual(data, {"message": "Malformed request. slice_id or table_name and db_name arguments are expected"}) + self.assertEqual( + data, + { + "message": "Malformed request. 
slice_id or table_name and db_name arguments are expected" + }, + ) def test_warm_up_cache_chart_not_found(self): self.login() @@ -233,14 +258,32 @@ def test_warm_up_cache_chart_not_found(self): def test_warm_up_cache_table_not_found(self): self.login() - rv = self.client.put("/api/v1/cachekey/warm_up_cache", json={"table_name": "not_here", "db_name": "abc"}) + rv = self.client.put( + "/api/v1/cachekey/warm_up_cache", + json={"table_name": "not_here", "db_name": "abc"}, + ) self.assertEqual(rv.status_code, 404) data = json.loads(rv.data.decode("utf-8")) - self.assertEqual(data, {"message": "The provided table was not found in the provided database"}) + self.assertEqual( + data, + {"message": "The provided table was not found in the provided database"}, + ) def test_warm_up_cache_payload_validation(self): self.login() - rv = self.client.put("/api/v1/cachekey/warm_up_cache", json={"chart_id": "id", "table_name": 2, "db_name": 4}) + rv = self.client.put( + "/api/v1/cachekey/warm_up_cache", + json={"chart_id": "id", "table_name": 2, "db_name": 4}, + ) self.assertEqual(rv.status_code, 400) data = json.loads(rv.data.decode("utf-8")) - self.assertEqual(data, {"message": {"chart_id": ["Not a valid integer."], "db_name": ["Not a valid string."], "table_name": ["Not a valid string."]}}) + self.assertEqual( + data, + { + "message": { + "chart_id": ["Not a valid integer."], + "db_name": ["Not a valid string."], + "table_name": ["Not a valid string."], + } + }, + ) diff --git a/tests/integration_tests/cachekeys/commands_tests.py b/tests/integration_tests/cachekeys/commands_tests.py index c40d73b69a76e..cbeb08435c138 100644 --- a/tests/integration_tests/cachekeys/commands_tests.py +++ b/tests/integration_tests/cachekeys/commands_tests.py @@ -17,7 +17,11 @@ import pytest from superset.cachekeys.commands.warm_up_cache import WarmUpCacheCommand -from superset.cachekeys.commands.exceptions import WarmUpCacheChartNotFoundError, WarmUpCacheParametersExpectedError, 
WarmUpCacheTableNotFoundError +from superset.cachekeys.commands.exceptions import ( + WarmUpCacheChartNotFoundError, + WarmUpCacheParametersExpectedError, + WarmUpCacheTableNotFoundError, +) from superset.extensions import db from tests.integration_tests.base_tests import SupersetTestCase from tests.integration_tests.fixtures.birth_names_dashboard import ( @@ -49,4 +53,6 @@ def test_warm_up_cache_command_table_not_found(self): def test_warm_up_cache(self): slc = self.get_slice("Girls", db.session) result = WarmUpCacheCommand(slc.id, None, None, None, None).run() - self.assertEqual(result, [{"chart_id": slc.id, "viz_error": None, "viz_status": "success"}]) + self.assertEqual( + result, [{"chart_id": slc.id, "viz_error": None, "viz_status": "success"}] + ) From 00619470669f0c6acb93badd1f5ccf7e70e372e3 Mon Sep 17 00:00:00 2001 From: Jack Fragassi Date: Fri, 5 May 2023 11:15:40 -0700 Subject: [PATCH 13/31] isort --- tests/integration_tests/cachekeys/commands_tests.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/integration_tests/cachekeys/commands_tests.py b/tests/integration_tests/cachekeys/commands_tests.py index cbeb08435c138..bc65feef7d9d7 100644 --- a/tests/integration_tests/cachekeys/commands_tests.py +++ b/tests/integration_tests/cachekeys/commands_tests.py @@ -16,12 +16,12 @@ # under the License. 
import pytest -from superset.cachekeys.commands.warm_up_cache import WarmUpCacheCommand from superset.cachekeys.commands.exceptions import ( WarmUpCacheChartNotFoundError, WarmUpCacheParametersExpectedError, WarmUpCacheTableNotFoundError, ) +from superset.cachekeys.commands.warm_up_cache import WarmUpCacheCommand from superset.extensions import db from tests.integration_tests.base_tests import SupersetTestCase from tests.integration_tests.fixtures.birth_names_dashboard import ( @@ -29,8 +29,8 @@ load_birth_names_data, ) from tests.integration_tests.fixtures.energy_dashboard import ( - load_energy_table_with_slice, load_energy_table_data, + load_energy_table_with_slice, ) From f7c297f6e3adcd83d077017444ff7b0924e7b3a0 Mon Sep 17 00:00:00 2001 From: Jack Fragassi Date: Fri, 5 May 2023 11:41:10 -0700 Subject: [PATCH 14/31] Adjust warm up cache task --- superset/tasks/cache.py | 72 ++++++++++++++++++++--------------------- 1 file changed, 36 insertions(+), 36 deletions(-) diff --git a/superset/tasks/cache.py b/superset/tasks/cache.py index 5e93503e1783a..38e3322877b08 100644 --- a/superset/tasks/cache.py +++ b/superset/tasks/cache.py @@ -36,21 +36,19 @@ logger.setLevel(logging.INFO) -def get_url(chart: Slice, dashboard: Optional[Dashboard] = None) -> str: - """Return external URL for warming up a given chart/table cache.""" - with app.test_request_context(): - baseurl = "{WEBDRIVER_BASEURL}".format(**app.config) - url = f"{baseurl}api/v1/cachekey/warm_up_cache/?slice_id={chart.id}" - if dashboard: - url += f"&dashboard_id={dashboard.id}" - return url +def get_payload(chart: Slice, dashboard: Optional[Dashboard] = None) -> Dict[str, int]: + """Return payload for warming up a given chart/table cache.""" + payload = {"chart_id": chart.id} + if dashboard: + payload["dashboard_id"] = dashboard.id + return payload class Strategy: # pylint: disable=too-few-public-methods """ A cache warm up strategy. 
- Each strategy defines a `get_urls` method that returns a list of URLs to + Each strategy defines a `get_payloads` method that returns a list of URLs to be fetched from the `/api/v1/cachekey/warm_up_cache` endpoint. Strategies can be configured in `superset/config.py`: @@ -72,8 +70,8 @@ class Strategy: # pylint: disable=too-few-public-methods def __init__(self) -> None: pass - def get_urls(self) -> List[str]: - raise NotImplementedError("Subclasses must implement get_urls!") + def get_payloads(self) -> List[str]: + raise NotImplementedError("Subclasses must implement get_payloads!") class DummyStrategy(Strategy): # pylint: disable=too-few-public-methods @@ -94,11 +92,11 @@ class DummyStrategy(Strategy): # pylint: disable=too-few-public-methods name = "dummy" - def get_urls(self) -> List[str]: + def get_payloads(self) -> List[str]: session = db.create_scoped_session() charts = session.query(Slice).all() - return [get_url(chart) for chart in charts] + return [get_payload(chart) for chart in charts] class TopNDashboardsStrategy(Strategy): # pylint: disable=too-few-public-methods @@ -126,8 +124,8 @@ def __init__(self, top_n: int = 5, since: str = "7 days ago") -> None: self.top_n = top_n self.since = parse_human_datetime(since) if since else None - def get_urls(self) -> List[str]: - urls = [] + def get_payloads(self) -> List[str]: + payloads = [] session = db.create_scoped_session() records = ( @@ -142,9 +140,9 @@ def get_urls(self) -> List[str]: dashboards = session.query(Dashboard).filter(Dashboard.id.in_(dash_ids)).all() for dashboard in dashboards: for chart in dashboard.slices: - urls.append(get_url(chart, dashboard)) + payloads.append(get_payload(chart, dashboard)) - return urls + return payloads class DashboardTagsStrategy(Strategy): # pylint: disable=too-few-public-methods @@ -169,8 +167,8 @@ def __init__(self, tags: Optional[List[str]] = None) -> None: super().__init__() self.tags = tags or [] - def get_urls(self) -> List[str]: - urls = [] + def 
get_payloads(self) -> List[str]: + payloads = [] session = db.create_scoped_session() tags = session.query(Tag).filter(Tag.name.in_(self.tags)).all() @@ -191,7 +189,7 @@ def get_urls(self) -> List[str]: tagged_dashboards = session.query(Dashboard).filter(Dashboard.id.in_(dash_ids)) for dashboard in tagged_dashboards: for chart in dashboard.slices: - urls.append(get_url(chart)) + payloads.append(get_payload(chart)) # add charts that are tagged tagged_objects = ( @@ -207,35 +205,37 @@ def get_urls(self) -> List[str]: chart_ids = [tagged_object.object_id for tagged_object in tagged_objects] tagged_charts = session.query(Slice).filter(Slice.id.in_(chart_ids)) for chart in tagged_charts: - urls.append(get_url(chart)) + payloads.append(get_payload(chart)) - return urls + return payloads strategies = [DummyStrategy, TopNDashboardsStrategy, DashboardTagsStrategy] @celery_app.task(name="fetch_url") -def fetch_url(url: str, headers: Dict[str, str]) -> Dict[str, str]: +def fetch_url(data: Dict[str, int], headers: Dict[str, str]) -> Dict[str, str]: """ Celery job to fetch url """ result = {} try: - logger.info("Fetching %s", url) - req = request.Request(url, headers=headers) + baseurl = "{WEBDRIVER_BASEURL}".format(**app.config) + url = f"{baseurl}api/v1/cachekey/warm_up_cache" + logger.info("Fetching %s with payload %s", url, str(data)) + req = request.Request(url, data=data, headers=headers, method="PUT") response = request.urlopen( # pylint: disable=consider-using-with req, timeout=600 ) - logger.info("Fetched %s, status code: %s", url, response.code) + logger.info("Fetched %s with payload %s, status code: %s", url, str(data), response.code) if response.code == 200: - result = {"success": url, "response": response.read().decode("utf-8")} + result = {"success": str(data), "response": response.read().decode("utf-8")} else: - result = {"error": url, "status_code": response.code} - logger.error("Error fetching %s, status code: %s", url, response.code) + result = {"error": 
str(data), "status_code": response.code} + logger.error("Error fetching %s with payload %s, status code: %s", url, str(data), response.code) except URLError as err: logger.exception("Error warming up cache!") - result = {"error": url, "exception": str(err)} + result = {"error": str(data), "exception": str(err)} return result @@ -273,13 +273,13 @@ def cache_warmup( headers = {"Cookie": f"session={cookies.get('session', '')}"} results: Dict[str, List[str]] = {"scheduled": [], "errors": []} - for url in strategy.get_urls(): + for payload in strategy.get_payloads(): try: - logger.info("Scheduling %s", url) - fetch_url.delay(url, headers) - results["scheduled"].append(url) + logger.info("Scheduling %s", str(payload)) + fetch_url.delay(payload, headers) + results["scheduled"].append(str(payload)) except SchedulingError: - logger.exception("Error scheduling fetch_url: %s", url) - results["errors"].append(url) + logger.exception("Error scheduling fetch_url for payload: %s", str(payload)) + results["errors"].append(str(payload)) return results From 9606c28d58ed129d8fff52166c383aab0c1827b2 Mon Sep 17 00:00:00 2001 From: Jack Fragassi Date: Fri, 5 May 2023 11:41:47 -0700 Subject: [PATCH 15/31] Formatting --- superset/tasks/cache.py | 11 +++++++++-- 1 file changed, 9 insertions(+), 2 deletions(-) diff --git a/superset/tasks/cache.py b/superset/tasks/cache.py index 38e3322877b08..dada1428c935a 100644 --- a/superset/tasks/cache.py +++ b/superset/tasks/cache.py @@ -227,12 +227,19 @@ def fetch_url(data: Dict[str, int], headers: Dict[str, str]) -> Dict[str, str]: response = request.urlopen( # pylint: disable=consider-using-with req, timeout=600 ) - logger.info("Fetched %s with payload %s, status code: %s", url, str(data), response.code) + logger.info( + "Fetched %s with payload %s, status code: %s", url, str(data), response.code + ) if response.code == 200: result = {"success": str(data), "response": response.read().decode("utf-8")} else: result = {"error": str(data), 
"status_code": response.code} - logger.error("Error fetching %s with payload %s, status code: %s", url, str(data), response.code) + logger.error( + "Error fetching %s with payload %s, status code: %s", + url, + str(data), + response.code, + ) except URLError as err: logger.exception("Error warming up cache!") result = {"error": str(data), "exception": str(err)} From cfe2e307daf1d2637c7738578c8750b708e54e87 Mon Sep 17 00:00:00 2001 From: Jack Fragassi Date: Fri, 5 May 2023 11:44:15 -0700 Subject: [PATCH 16/31] Fix doc string --- superset/tasks/cache.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/superset/tasks/cache.py b/superset/tasks/cache.py index dada1428c935a..88bd9e8b3a069 100644 --- a/superset/tasks/cache.py +++ b/superset/tasks/cache.py @@ -48,7 +48,7 @@ class Strategy: # pylint: disable=too-few-public-methods """ A cache warm up strategy. - Each strategy defines a `get_payloads` method that returns a list of URLs to + Each strategy defines a `get_payloads` method that returns a list of payloads to be fetched from the `/api/v1/cachekey/warm_up_cache` endpoint. 
Strategies can be configured in `superset/config.py`: From b53339cd04c9fe3865e5bc1aded15a6791e9714d Mon Sep 17 00:00:00 2001 From: Jack Fragassi Date: Fri, 5 May 2023 11:50:34 -0700 Subject: [PATCH 17/31] Fix urllib request --- superset/tasks/cache.py | 21 ++++++++++++--------- 1 file changed, 12 insertions(+), 9 deletions(-) diff --git a/superset/tasks/cache.py b/superset/tasks/cache.py index 88bd9e8b3a069..4357d59adf538 100644 --- a/superset/tasks/cache.py +++ b/superset/tasks/cache.py @@ -214,7 +214,7 @@ def get_payloads(self) -> List[str]: @celery_app.task(name="fetch_url") -def fetch_url(data: Dict[str, int], headers: Dict[str, str]) -> Dict[str, str]: +def fetch_url(data: str, headers: Dict[str, str]) -> Dict[str, str]: """ Celery job to fetch url """ @@ -222,27 +222,27 @@ def fetch_url(data: Dict[str, int], headers: Dict[str, str]) -> Dict[str, str]: try: baseurl = "{WEBDRIVER_BASEURL}".format(**app.config) url = f"{baseurl}api/v1/cachekey/warm_up_cache" - logger.info("Fetching %s with payload %s", url, str(data)) + logger.info("Fetching %s with payload %s", url, data) req = request.Request(url, data=data, headers=headers, method="PUT") response = request.urlopen( # pylint: disable=consider-using-with req, timeout=600 ) logger.info( - "Fetched %s with payload %s, status code: %s", url, str(data), response.code + "Fetched %s with payload %s, status code: %s", url, data, response.code ) if response.code == 200: - result = {"success": str(data), "response": response.read().decode("utf-8")} + result = {"success": data, "response": response.read().decode("utf-8")} else: - result = {"error": str(data), "status_code": response.code} + result = {"error": data, "status_code": response.code} logger.error( "Error fetching %s with payload %s, status code: %s", url, - str(data), + data, response.code, ) except URLError as err: logger.exception("Error warming up cache!") - result = {"error": str(data), "exception": str(err)} + result = {"error": data, "exception": 
str(err)} return result @@ -277,13 +277,16 @@ def cache_warmup( user = security_manager.get_user_by_username(app.config["THUMBNAIL_SELENIUM_USER"]) cookies = MachineAuthProvider.get_auth_cookies(user) - headers = {"Cookie": f"session={cookies.get('session', '')}"} + headers = { + "Cookie": f"session={cookies.get('session', '')}", + "Content-Type": "application/json", + } results: Dict[str, List[str]] = {"scheduled": [], "errors": []} for payload in strategy.get_payloads(): try: logger.info("Scheduling %s", str(payload)) - fetch_url.delay(payload, headers) + fetch_url.delay(str(payload), headers) results["scheduled"].append(str(payload)) except SchedulingError: logger.exception("Error scheduling fetch_url for payload: %s", str(payload)) From 79fde325fd9c4bc251d1908920a0ca6bccb445b8 Mon Sep 17 00:00:00 2001 From: Jack Fragassi Date: Wed, 17 May 2023 21:15:31 -0700 Subject: [PATCH 18/31] Move new endpoint to /charts top-level resource --- superset/cachekeys/api.py | 75 +----------- superset/cachekeys/commands/__init__.py | 16 --- superset/cachekeys/commands/exceptions.py | 38 ------ superset/cachekeys/schemas.py | 41 ------- superset/charts/api.py | 64 ++++++++++ superset/charts/commands/exceptions.py | 18 +++ .../commands/warm_up_cache.py | 2 +- superset/charts/schemas.py | 44 +++++++ superset/tasks/cache.py | 4 +- superset/views/core.py | 2 +- .../integration_tests/cachekeys/api_tests.py | 112 ------------------ .../cachekeys/commands_tests.py | 58 --------- tests/integration_tests/charts/api_tests.py | 111 +++++++++++++++++ .../charts/commands_tests.py | 36 +++++- tests/integration_tests/strategy_tests.py | 6 +- 15 files changed, 281 insertions(+), 346 deletions(-) delete mode 100644 superset/cachekeys/commands/__init__.py delete mode 100644 superset/cachekeys/commands/exceptions.py rename superset/{cachekeys => charts}/commands/warm_up_cache.py (98%) delete mode 100644 tests/integration_tests/cachekeys/commands_tests.py diff --git a/superset/cachekeys/api.py 
b/superset/cachekeys/api.py index 3c408fd457f47..cb9b676287a15 100644 --- a/superset/cachekeys/api.py +++ b/superset/cachekeys/api.py @@ -24,13 +24,7 @@ from marshmallow.exceptions import ValidationError from sqlalchemy.exc import SQLAlchemyError -from superset.cachekeys.commands.warm_up_cache import WarmUpCacheCommand -from superset.cachekeys.schemas import ( - CacheInvalidationRequestSchema, - CacheWarmUpRequestSchema, - CacheWarmUpResponseSchema, -) -from superset.commands.exceptions import CommandException +from superset.cachekeys.schemas import CacheInvalidationRequestSchema from superset.connectors.sqla.models import SqlaTable from superset.extensions import cache_manager, db, event_logger, stats_logger_manager from superset.models.cache import CacheKey @@ -46,74 +40,9 @@ class CacheRestApi(BaseSupersetModelRestApi): class_permission_name = "CacheRestApi" include_route_methods = { "invalidate", - "warm_up_cache", } - openapi_spec_component_schemas = ( - CacheInvalidationRequestSchema, - CacheWarmUpRequestSchema, - CacheWarmUpResponseSchema, - ) - - @expose("/warm_up_cache", methods=["PUT"]) - @protect() - @safe - @statsd_metrics - @event_logger.log_this_with_context( - action=lambda self, *args, **kwargs: f"{self.__class__.__name__}" - f".warm_up_cache", - log_to_statsd=False, - ) - def warm_up_cache(self) -> Response: - """ - --- - put: - summary: >- - Warms up the cache for the slice or table - description: >- - Warms up the cache for the slice or table. - Note for slices a force refresh occurs. - In terms of the `extra_filters` these can be obtained from records in the JSON - encoded `logs.json` column associated with the `explore_json` action. - requestBody: - description: >- - Identifies charts to warm up cache for, and any additional dashboard or - filter context to use. Either a chart id or a table name and a database - name can be passed. 
- required: true - content: - application/json: - schema: - $ref: "#/components/schemas/CacheWarmUpRequestSchema" - responses: - 200: - description: Each chart's warmup status - content: - application/json: - schema: - $ref: "#/components/schemas/CacheWarmUpResponseSchema" - 400: - $ref: '#/components/responses/400' - 404: - $ref: '#/components/responses/404' - 500: - $ref: '#/components/responses/500' - """ - try: - body = CacheWarmUpRequestSchema().load(request.json) - except ValidationError as error: - return self.response_400(message=error.messages) - try: - payload = WarmUpCacheCommand( - body.get("chart_id"), - body.get("dashboard_id"), - body.get("table_name"), - body.get("db_name"), - body.get("extra_filters"), - ).run() - return self.response(200, result=payload) - except CommandException as ex: - return self.response(ex.status, message=ex.message) + openapi_spec_component_schemas = (CacheInvalidationRequestSchema,) @expose("/invalidate", methods=("POST",)) @protect() diff --git a/superset/cachekeys/commands/__init__.py b/superset/cachekeys/commands/__init__.py deleted file mode 100644 index 13a83393a9124..0000000000000 --- a/superset/cachekeys/commands/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. 
See the License for the -# specific language governing permissions and limitations -# under the License. diff --git a/superset/cachekeys/commands/exceptions.py b/superset/cachekeys/commands/exceptions.py deleted file mode 100644 index 84acfb5928299..0000000000000 --- a/superset/cachekeys/commands/exceptions.py +++ /dev/null @@ -1,38 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. - -from flask_babel import lazy_gettext as _ - -from superset.commands.exceptions import CommandException - - -class WarmUpCacheParametersExpectedError(CommandException): - status = 400 - message = _( - "Malformed request. 
slice_id or table_name " - "and db_name arguments are expected" - ) - - -class WarmUpCacheChartNotFoundError(CommandException): - status = 404 - message = _("Chart not found") - - -class WarmUpCacheTableNotFoundError(CommandException): - status = 404 - message = _("The provided table was not found in the provided database") diff --git a/superset/cachekeys/schemas.py b/superset/cachekeys/schemas.py index 8682daf3d733d..e58a45ac565b9 100644 --- a/superset/cachekeys/schemas.py +++ b/superset/cachekeys/schemas.py @@ -51,44 +51,3 @@ class CacheInvalidationRequestSchema(Schema): fields.Nested(Datasource), metadata={"description": "A list of the data source and database names"}, ) - - -class CacheWarmUpRequestSchema(Schema): - chart_id = fields.Integer( - metadata={"description": "The ID of the chart to warm up cache for"} - ) - dashboard_id = fields.Integer( - metadata={ - "description": "The ID of the dashboard to get filters for when warming cache" - } - ) - table_name = fields.String( - metadata={"description": "The name of the table to warm up cache for"} - ) - db_name = fields.String( - metadata={"description": "The name of the database where the table is located"} - ) - extra_filters = fields.String( - metadata={"description": "Extra filters to apply when warming up cache"} - ) - - -class CacheWarmUpResponseSingleSchema(Schema): - chart_id = fields.Integer( - metadata={"description": "The ID of the chart the status belongs to"} - ) - viz_error = fields.String( - metadata={"description": "Error that occurred when warming cache for chart"} - ) - viz_status = fields.String( - metadata={"description": "Status of the underlying query for the viz"} - ) - - -class CacheWarmUpResponseSchema(Schema): - result = fields.List( - fields.Nested(CacheWarmUpResponseSingleSchema), - metadata={ - "description": "A list of each chart's warmup status and errors if any" - }, - ) diff --git a/superset/charts/api.py b/superset/charts/api.py index 8f9ecae8fb6b3..cb96ee445992c 100644 --- 
a/superset/charts/api.py +++ b/superset/charts/api.py @@ -47,6 +47,7 @@ from superset.charts.commands.export import ExportChartsCommand from superset.charts.commands.importers.dispatcher import ImportChartsCommand from superset.charts.commands.update import UpdateChartCommand +from superset.charts.commands.warm_up_cache import WarmUpCacheCommand from superset.charts.dao import ChartDAO from superset.charts.filters import ( ChartAllTextFilter, @@ -60,6 +61,7 @@ ) from superset.charts.schemas import ( CHART_SCHEMAS, + CacheWarmUpRequestSchema, ChartPostSchema, ChartPutSchema, get_delete_ids_schema, @@ -69,6 +71,7 @@ screenshot_query_schema, thumbnail_query_schema, ) +from superset.commands.exceptions import CommandException from superset.commands.importers.exceptions import ( IncorrectFormatError, NoValidFilesFoundError, @@ -118,6 +121,7 @@ def ensure_thumbnails_enabled(self) -> Optional[Response]: "thumbnail", "screenshot", "cache_screenshot", + "warm_up_cache", } class_permission_name = "Chart" method_permission_name = MODEL_API_RW_METHOD_PERMISSION_MAP @@ -949,6 +953,66 @@ def remove_favorite(self, pk: int) -> Response: ChartDAO.remove_favorite(chart) return self.response(200, result="OK") + @expose("/warm_up_cache", methods=("PUT",)) + @protect() + @safe + @statsd_metrics + @event_logger.log_this_with_context( + action=lambda self, *args, **kwargs: f"{self.__class__.__name__}" + f".warm_up_cache", + log_to_statsd=False, + ) + def warm_up_cache(self) -> Response: + """ + --- + put: + summary: >- + Warms up the cache for the slice or table + description: >- + Warms up the cache for the slice or table. + Note for slices a force refresh occurs. + In terms of the `extra_filters` these can be obtained from records in the JSON + encoded `logs.json` column associated with the `explore_json` action. + requestBody: + description: >- + Identifies charts to warm up cache for, and any additional dashboard or + filter context to use. 
Either a chart id or a table name and a database + name can be passed. + required: true + content: + application/json: + schema: + $ref: "#/components/schemas/CacheWarmUpRequestSchema" + responses: + 200: + description: Each chart's warmup status + content: + application/json: + schema: + $ref: "#/components/schemas/CacheWarmUpResponseSchema" + 400: + $ref: '#/components/responses/400' + 404: + $ref: '#/components/responses/404' + 500: + $ref: '#/components/responses/500' + """ + try: + body = CacheWarmUpRequestSchema().load(request.json) + except ValidationError as error: + return self.response_400(message=error.messages) + try: + payload = WarmUpCacheCommand( + body.get("chart_id"), + body.get("dashboard_id"), + body.get("table_name"), + body.get("db_name"), + body.get("extra_filters"), + ).run() + return self.response(200, result=payload) + except CommandException as ex: + return self.response(ex.status, message=ex.message) + @expose("/import/", methods=("POST",)) @protect() @statsd_metrics diff --git a/superset/charts/commands/exceptions.py b/superset/charts/commands/exceptions.py index 6d5c078b12289..7de1999646e51 100644 --- a/superset/charts/commands/exceptions.py +++ b/superset/charts/commands/exceptions.py @@ -153,3 +153,21 @@ class ChartBulkDeleteFailedReportsExistError(ChartBulkDeleteFailedError): class ChartImportError(ImportFailedError): message = _("Import chart failed for an unknown reason") + + +class WarmUpCacheParametersExpectedError(CommandException): + status = 400 + message = _( + "Malformed request. 
slice_id or table_name " + "and db_name arguments are expected" + ) + + +class WarmUpCacheChartNotFoundError(CommandException): + status = 404 + message = _("Chart not found") + + +class WarmUpCacheTableNotFoundError(CommandException): + status = 404 + message = _("The provided table was not found in the provided database") diff --git a/superset/cachekeys/commands/warm_up_cache.py b/superset/charts/commands/warm_up_cache.py similarity index 98% rename from superset/cachekeys/commands/warm_up_cache.py rename to superset/charts/commands/warm_up_cache.py index 6416fffcb4f00..5e0b2973aa81a 100644 --- a/superset/cachekeys/commands/warm_up_cache.py +++ b/superset/charts/commands/warm_up_cache.py @@ -21,7 +21,7 @@ import simplejson as json from flask import g -from superset.cachekeys.commands.exceptions import ( +from superset.charts.commands.exceptions import ( WarmUpCacheChartNotFoundError, WarmUpCacheParametersExpectedError, WarmUpCacheTableNotFoundError, diff --git a/superset/charts/schemas.py b/superset/charts/schemas.py index e70dbad476e69..cf72ed9a05189 100644 --- a/superset/charts/schemas.py +++ b/superset/charts/schemas.py @@ -1586,7 +1586,51 @@ class ImportV1ChartSchema(Schema): external_url = fields.String(allow_none=True) +class CacheWarmUpRequestSchema(Schema): + chart_id = fields.Integer( + metadata={"description": "The ID of the chart to warm up cache for"} + ) + dashboard_id = fields.Integer( + metadata={ + "description": "The ID of the dashboard to get filters for when warming cache" + } + ) + table_name = fields.String( + metadata={"description": "The name of the table to warm up cache for"} + ) + db_name = fields.String( + metadata={"description": "The name of the database where the table is located"} + ) + extra_filters = fields.String( + metadata={"description": "Extra filters to apply when warming up cache"} + ) + + +class CacheWarmUpResponseSingleSchema(Schema): + chart_id = fields.Integer( + metadata={"description": "The ID of the chart the status 
belongs to"} + ) + viz_error = fields.String( + metadata={"description": "Error that occurred when warming cache for chart"} + ) + viz_status = fields.String( + metadata={"description": "Status of the underlying query for the viz"} + ) + + +class CacheWarmUpResponseSchema(Schema): + result = fields.List( + fields.Nested(CacheWarmUpResponseSingleSchema), + metadata={ + "description": "A list of each chart's warmup status and errors if any" + }, + ) + + + CHART_SCHEMAS = ( + CacheWarmUpRequestSchema, + CacheWarmUpResponseSchema, ChartDataQueryContextSchema, ChartDataResponseSchema, ChartDataAsyncResponseSchema, diff --git a/superset/tasks/cache.py b/superset/tasks/cache.py index 4357d59adf538..6b99340b80fea 100644 --- a/superset/tasks/cache.py +++ b/superset/tasks/cache.py @@ -49,7 +49,7 @@ class Strategy: # pylint: disable=too-few-public-methods A cache warm up strategy. Each strategy defines a `get_payloads` method that returns a list of payloads to - be fetched from the `/api/v1/cachekey/warm_up_cache` endpoint. + be fetched from the `/api/v1/chart/warm_up_cache` endpoint. 
Strategies can be configured in `superset/config.py`: @@ -221,7 +221,7 @@ def fetch_url(data: str, headers: Dict[str, str]) -> Dict[str, str]: result = {} try: baseurl = "{WEBDRIVER_BASEURL}".format(**app.config) - url = f"{baseurl}api/v1/cachekey/warm_up_cache" + url = f"{baseurl}api/v1/chart/warm_up_cache" logger.info("Fetching %s with payload %s", url, data) req = request.Request(url, data=data, headers=headers, method="PUT") response = request.urlopen( # pylint: disable=consider-using-with diff --git a/superset/views/core.py b/superset/views/core.py index 67cda69941e29..1c34cea752613 100755 --- a/superset/views/core.py +++ b/superset/views/core.py @@ -1734,7 +1734,7 @@ def fave_slices(self, user_id: Optional[int] = None) -> FlaskResponse: @api @has_access_api @expose("/warm_up_cache/", methods=("GET",)) - @deprecated(new_target="api/v1/cachekey/warm_up_cache") + @deprecated(new_target="api/v1/chart/warm_up_cache") def warm_up_cache( # pylint: disable=too-many-locals,no-self-use self, ) -> FlaskResponse: diff --git a/tests/integration_tests/cachekeys/api_tests.py b/tests/integration_tests/cachekeys/api_tests.py index 1a3bc908901cc..02ebabad3ce32 100644 --- a/tests/integration_tests/cachekeys/api_tests.py +++ b/tests/integration_tests/cachekeys/api_tests.py @@ -175,115 +175,3 @@ def test_invalidate_existing_caches(invalidate): .datasource_uid == "X__table" ) - - -class TestWarmUpCache(SupersetTestCase): - @pytest.mark.usefixtures( - "load_energy_table_with_slice", "load_birth_names_dashboard_with_slices" - ) - def test_warm_up_cache(self): - self.login() - slc = self.get_slice("Girls", db.session) - rv = self.client.put( - "/api/v1/cachekey/warm_up_cache", json={"chart_id": slc.id} - ) - self.assertEqual(rv.status_code, 200) - data = json.loads(rv.data.decode("utf-8")) - - self.assertEqual( - data["result"], - [{"chart_id": slc.id, "viz_error": None, "viz_status": "success"}], - ) - - rv = self.client.put( - "/api/v1/cachekey/warm_up_cache", - json={ - 
"table_name": "energy_usage", - "db_name": get_example_database().database_name, - }, - ) - self.assertEqual(rv.status_code, 200) - data = json.loads(rv.data.decode("utf-8")) - - assert len(data["result"]) > 0 - - dashboard = self.get_dash_by_slug("births") - - rv = self.client.put( - "/api/v1/cachekey/warm_up_cache", - json={"chart_id": slc.id, "dashboard_id": dashboard.id}, - ) - self.assertEqual(rv.status_code, 200) - data = json.loads(rv.data.decode("utf-8")) - self.assertEqual( - data["result"], - [{"chart_id": slc.id, "viz_error": None, "viz_status": "success"}], - ) - - rv = self.client.put( - "/api/v1/cachekey/warm_up_cache", - json={ - "chart_id": slc.id, - "dashboard_id": dashboard.id, - "extra_filters": json.dumps( - [{"col": "name", "op": "in", "val": ["Jennifer"]}] - ), - }, - ) - self.assertEqual(rv.status_code, 200) - data = json.loads(rv.data.decode("utf-8")) - self.assertEqual( - data["result"], - [{"chart_id": slc.id, "viz_error": None, "viz_status": "success"}], - ) - - def test_warm_up_cache_required_params_missing(self): - self.login() - rv = self.client.put("/api/v1/cachekey/warm_up_cache", json={"dashboard_id": 1}) - self.assertEqual(rv.status_code, 400) - data = json.loads(rv.data.decode("utf-8")) - self.assertEqual( - data, - { - "message": "Malformed request. 
slice_id or table_name and db_name arguments are expected" - }, - ) - - def test_warm_up_cache_chart_not_found(self): - self.login() - rv = self.client.put("/api/v1/cachekey/warm_up_cache", json={"chart_id": 99999}) - self.assertEqual(rv.status_code, 404) - data = json.loads(rv.data.decode("utf-8")) - self.assertEqual(data, {"message": "Chart not found"}) - - def test_warm_up_cache_table_not_found(self): - self.login() - rv = self.client.put( - "/api/v1/cachekey/warm_up_cache", - json={"table_name": "not_here", "db_name": "abc"}, - ) - self.assertEqual(rv.status_code, 404) - data = json.loads(rv.data.decode("utf-8")) - self.assertEqual( - data, - {"message": "The provided table was not found in the provided database"}, - ) - - def test_warm_up_cache_payload_validation(self): - self.login() - rv = self.client.put( - "/api/v1/cachekey/warm_up_cache", - json={"chart_id": "id", "table_name": 2, "db_name": 4}, - ) - self.assertEqual(rv.status_code, 400) - data = json.loads(rv.data.decode("utf-8")) - self.assertEqual( - data, - { - "message": { - "chart_id": ["Not a valid integer."], - "db_name": ["Not a valid string."], - "table_name": ["Not a valid string."], - } - }, - ) diff --git a/tests/integration_tests/cachekeys/commands_tests.py b/tests/integration_tests/cachekeys/commands_tests.py deleted file mode 100644 index bc65feef7d9d7..0000000000000 --- a/tests/integration_tests/cachekeys/commands_tests.py +++ /dev/null @@ -1,58 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. 
You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. -import pytest - -from superset.cachekeys.commands.exceptions import ( - WarmUpCacheChartNotFoundError, - WarmUpCacheParametersExpectedError, - WarmUpCacheTableNotFoundError, -) -from superset.cachekeys.commands.warm_up_cache import WarmUpCacheCommand -from superset.extensions import db -from tests.integration_tests.base_tests import SupersetTestCase -from tests.integration_tests.fixtures.birth_names_dashboard import ( - load_birth_names_dashboard_with_slices, - load_birth_names_data, -) -from tests.integration_tests.fixtures.energy_dashboard import ( - load_energy_table_data, - load_energy_table_with_slice, -) - - -class TestWarmUpCacheCommand(SupersetTestCase): - def test_warm_up_cache_command_required_params_missing(self): - with self.assertRaises(WarmUpCacheParametersExpectedError): - WarmUpCacheCommand(None, 1, None, None, None).run() - - def test_warm_up_cache_command_chart_not_found(self): - with self.assertRaises(WarmUpCacheChartNotFoundError): - WarmUpCacheCommand(99999, None, None, None, None).run() - - def test_warm_up_cache_command_table_not_found(self): - with self.assertRaises(WarmUpCacheTableNotFoundError): - WarmUpCacheCommand(None, None, "not_here", "abc", None).run() - - @pytest.mark.usefixtures( - "load_energy_table_with_slice", "load_birth_names_dashboard_with_slices" - ) - def test_warm_up_cache(self): - slc = self.get_slice("Girls", db.session) - result = WarmUpCacheCommand(slc.id, None, None, None, None).run() - self.assertEqual( - result, [{"chart_id": slc.id, "viz_error": None, "viz_status": "success"}] - ) diff --git 
a/tests/integration_tests/charts/api_tests.py b/tests/integration_tests/charts/api_tests.py index 5a261fb5520c5..9f0d987c83855 100644 --- a/tests/integration_tests/charts/api_tests.py +++ b/tests/integration_tests/charts/api_tests.py @@ -34,6 +34,7 @@ from superset.reports.models import ReportSchedule, ReportScheduleType from superset.models.slice import Slice from superset.utils.core import get_example_default_schema +from superset.utils.database import get_example_database from tests.integration_tests.base_api_tests import ApiOwnersTestCaseMixin from tests.integration_tests.base_tests import SupersetTestCase @@ -1627,3 +1628,113 @@ def test_gets_owned_created_favorited_by_me_filter(self): assert data["result"][0]["slice_name"] == "name0" assert data["result"][0]["datasource_id"] == 1 + + @pytest.mark.usefixtures( + "load_energy_table_with_slice", "load_birth_names_dashboard_with_slices" + ) + def test_warm_up_cache(self): + self.login() + slc = self.get_slice("Girls", db.session) + rv = self.client.put( + "/api/v1/chart/warm_up_cache", json={"chart_id": slc.id} + ) + self.assertEqual(rv.status_code, 200) + data = json.loads(rv.data.decode("utf-8")) + + self.assertEqual( + data["result"], + [{"chart_id": slc.id, "viz_error": None, "viz_status": "success"}], + ) + + rv = self.client.put( + "/api/v1/chart/warm_up_cache", + json={ + "table_name": "energy_usage", + "db_name": get_example_database().database_name, + }, + ) + self.assertEqual(rv.status_code, 200) + data = json.loads(rv.data.decode("utf-8")) + + assert len(data["result"]) > 0 + + dashboard = self.get_dash_by_slug("births") + + rv = self.client.put( + "/api/v1/chart/warm_up_cache", + json={"chart_id": slc.id, "dashboard_id": dashboard.id}, + ) + self.assertEqual(rv.status_code, 200) + data = json.loads(rv.data.decode("utf-8")) + self.assertEqual( + data["result"], + [{"chart_id": slc.id, "viz_error": None, "viz_status": "success"}], + ) + + rv = self.client.put( + "/api/v1/chart/warm_up_cache", + json={ + 
"chart_id": slc.id, + "dashboard_id": dashboard.id, + "extra_filters": json.dumps( + [{"col": "name", "op": "in", "val": ["Jennifer"]}] + ), + }, + ) + self.assertEqual(rv.status_code, 200) + data = json.loads(rv.data.decode("utf-8")) + self.assertEqual( + data["result"], + [{"chart_id": slc.id, "viz_error": None, "viz_status": "success"}], + ) + + def test_warm_up_cache_required_params_missing(self): + self.login() + rv = self.client.put("/api/v1/chart/warm_up_cache", json={"dashboard_id": 1}) + self.assertEqual(rv.status_code, 400) + data = json.loads(rv.data.decode("utf-8")) + self.assertEqual( + data, + { + "message": "Malformed request. slice_id or table_name and db_name arguments are expected" + }, + ) + + def test_warm_up_cache_chart_not_found(self): + self.login() + rv = self.client.put("/api/v1/chart/warm_up_cache", json={"chart_id": 99999}) + self.assertEqual(rv.status_code, 404) + data = json.loads(rv.data.decode("utf-8")) + self.assertEqual(data, {"message": "Chart not found"}) + + def test_warm_up_cache_table_not_found(self): + self.login() + rv = self.client.put( + "/api/v1/chart/warm_up_cache", + json={"table_name": "not_here", "db_name": "abc"}, + ) + self.assertEqual(rv.status_code, 404) + data = json.loads(rv.data.decode("utf-8")) + self.assertEqual( + data, + {"message": "The provided table was not found in the provided database"}, + ) + + def test_warm_up_cache_payload_validation(self): + self.login() + rv = self.client.put( + "/api/v1/chart/warm_up_cache", + json={"chart_id": "id", "table_name": 2, "db_name": 4}, + ) + self.assertEqual(rv.status_code, 400) + data = json.loads(rv.data.decode("utf-8")) + self.assertEqual( + data, + { + "message": { + "chart_id": ["Not a valid integer."], + "db_name": ["Not a valid string."], + "table_name": ["Not a valid string."], + } + }, + ) diff --git a/tests/integration_tests/charts/commands_tests.py b/tests/integration_tests/charts/commands_tests.py index 4d365d56b53a0..88b3acc28150d 100644 --- 
a/tests/integration_tests/charts/commands_tests.py +++ b/tests/integration_tests/charts/commands_tests.py @@ -23,16 +23,26 @@ from superset import db, security_manager from superset.charts.commands.create import CreateChartCommand -from superset.charts.commands.exceptions import ChartNotFoundError +from superset.charts.commands.exceptions import ( + ChartNotFoundError, + WarmUpCacheChartNotFoundError, + WarmUpCacheParametersExpectedError, + WarmUpCacheTableNotFoundError, +) from superset.charts.commands.export import ExportChartsCommand from superset.charts.commands.importers.v1 import ImportChartsCommand from superset.charts.commands.update import UpdateChartCommand +from superset.charts.commands.warm_up_cache import WarmUpCacheCommand from superset.commands.exceptions import CommandInvalidError from superset.commands.importers.exceptions import IncorrectVersionError from superset.connectors.sqla.models import SqlaTable from superset.models.core import Database from superset.models.slice import Slice from tests.integration_tests.base_tests import SupersetTestCase +from tests.integration_tests.fixtures.birth_names_dashboard import ( + load_birth_names_dashboard_with_slices, + load_birth_names_data, +) from tests.integration_tests.fixtures.energy_dashboard import ( load_energy_table_data, load_energy_table_with_slice, @@ -442,3 +452,27 @@ def test_query_context_update_command(self, mock_sm_g, mock_g): assert chart.query_context == query_context assert len(chart.owners) == 1 assert chart.owners[0] == admin + + +class TestWarmUpCacheCommand(SupersetTestCase): + def test_warm_up_cache_command_required_params_missing(self): + with self.assertRaises(WarmUpCacheParametersExpectedError): + WarmUpCacheCommand(None, 1, None, None, None).run() + + def test_warm_up_cache_command_chart_not_found(self): + with self.assertRaises(WarmUpCacheChartNotFoundError): + WarmUpCacheCommand(99999, None, None, None, None).run() + + def test_warm_up_cache_command_table_not_found(self): + 
with self.assertRaises(WarmUpCacheTableNotFoundError): + WarmUpCacheCommand(None, None, "not_here", "abc", None).run() + + @pytest.mark.usefixtures( + "load_energy_table_with_slice", "load_birth_names_dashboard_with_slices" + ) + def test_warm_up_cache(self): + slc = self.get_slice("Girls", db.session) + result = WarmUpCacheCommand(slc.id, None, None, None, None).run() + self.assertEqual( + result, [{"chart_id": slc.id, "viz_error": None, "viz_status": "success"}] + ) diff --git a/tests/integration_tests/strategy_tests.py b/tests/integration_tests/strategy_tests.py index db5ed3bb2fb02..d297af6b7b6fd 100644 --- a/tests/integration_tests/strategy_tests.py +++ b/tests/integration_tests/strategy_tests.py @@ -81,7 +81,7 @@ def test_top_n_dashboards_strategy(self): result = sorted(strategy.get_urls()) expected = sorted( [ - f"{get_url_host()}api/v1/cachekey/warm_up_cache/?slice_id={slc.id}&dashboard_id={dash.id}" + f"{get_url_host()}api/v1/chart/warm_up_cache/?slice_id={slc.id}&dashboard_id={dash.id}" for slc in dash.slices ] ) @@ -112,7 +112,7 @@ def test_dashboard_tags(self): dash = self.get_dash_by_slug("births") tag1_urls = sorted( [ - f"{get_url_host()}api/v1/cachekey/warm_up_cache/?slice_id={slc.id}" + f"{get_url_host()}api/v1/chart/warm_up_cache/?slice_id={slc.id}" for slc in dash.slices ] ) @@ -136,7 +136,7 @@ def test_dashboard_tags(self): dash = self.get_dash_by_slug("unicode-test") slc = dash.slices[0] tag2_urls = [ - f"{get_url_host()}api/v1/cachekey/warm_up_cache/?slice_id={slc.id}" + f"{get_url_host()}api/v1/chart/warm_up_cache/?slice_id={slc.id}" ] object_id = slc.id tagged_object = TaggedObject( From dd732c40962393723856333b2049340d07dd1821 Mon Sep 17 00:00:00 2001 From: Jack Fragassi Date: Wed, 17 May 2023 21:26:37 -0700 Subject: [PATCH 19/31] rm unneeded imports --- tests/integration_tests/cachekeys/api_tests.py | 10 ---------- 1 file changed, 10 deletions(-) diff --git a/tests/integration_tests/cachekeys/api_tests.py 
b/tests/integration_tests/cachekeys/api_tests.py index 02ebabad3ce32..d3552bfc8df26 100644 --- a/tests/integration_tests/cachekeys/api_tests.py +++ b/tests/integration_tests/cachekeys/api_tests.py @@ -16,7 +16,6 @@ # under the License. # isort:skip_file """Unit tests for Superset""" -import json from typing import Dict, Any import pytest @@ -24,19 +23,10 @@ from superset.extensions import cache_manager, db from superset.models.cache import CacheKey from superset.utils.core import get_example_default_schema -from superset.utils.database import get_example_database from tests.integration_tests.base_tests import ( SupersetTestCase, post_assert_metric, ) -from tests.integration_tests.fixtures.birth_names_dashboard import ( - load_birth_names_dashboard_with_slices, - load_birth_names_data, -) -from tests.integration_tests.fixtures.energy_dashboard import ( - load_energy_table_with_slice, - load_energy_table_data, -) @pytest.fixture From f4df657c9ea677b369985dce0030698cc71ea504 Mon Sep 17 00:00:00 2001 From: Jack Fragassi Date: Wed, 17 May 2023 23:14:29 -0700 Subject: [PATCH 20/31] Fix cache warmup task --- superset/tasks/cache.py | 18 +++++---- tests/integration_tests/strategy_tests.py | 46 +++++++++-------------- 2 files changed, 29 insertions(+), 35 deletions(-) diff --git a/superset/tasks/cache.py b/superset/tasks/cache.py index 6b99340b80fea..5472cb3d45c7c 100644 --- a/superset/tasks/cache.py +++ b/superset/tasks/cache.py @@ -14,6 +14,7 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. +import json import logging from typing import Any, Dict, List, Optional, Union from urllib import request @@ -49,7 +50,7 @@ class Strategy: # pylint: disable=too-few-public-methods A cache warm up strategy. Each strategy defines a `get_payloads` method that returns a list of payloads to - be fetched from the `/api/v1/chart/warm_up_cache` endpoint. 
+ send to the `/api/v1/chart/warm_up_cache` endpoint. Strategies can be configured in `superset/config.py`: @@ -223,7 +224,9 @@ def fetch_url(data: str, headers: Dict[str, str]) -> Dict[str, str]: baseurl = "{WEBDRIVER_BASEURL}".format(**app.config) url = f"{baseurl}api/v1/chart/warm_up_cache" logger.info("Fetching %s with payload %s", url, data) - req = request.Request(url, data=data, headers=headers, method="PUT") + req = request.Request( + url, data=bytes(data, "utf-8"), headers=headers, method="PUT" + ) response = request.urlopen( # pylint: disable=consider-using-with req, timeout=600 ) @@ -285,11 +288,12 @@ def cache_warmup( results: Dict[str, List[str]] = {"scheduled": [], "errors": []} for payload in strategy.get_payloads(): try: - logger.info("Scheduling %s", str(payload)) - fetch_url.delay(str(payload), headers) - results["scheduled"].append(str(payload)) + payload = json.dumps(payload) + logger.info("Scheduling %s", payload) + fetch_url.delay(payload, headers) + results["scheduled"].append(payload) except SchedulingError: - logger.exception("Error scheduling fetch_url for payload: %s", str(payload)) - results["errors"].append(str(payload)) + logger.exception("Error scheduling fetch_url for payload: %s", payload) + results["errors"].append(payload) return results diff --git a/tests/integration_tests/strategy_tests.py b/tests/integration_tests/strategy_tests.py index d297af6b7b6fd..b295ebb401219 100644 --- a/tests/integration_tests/strategy_tests.py +++ b/tests/integration_tests/strategy_tests.py @@ -78,14 +78,11 @@ def test_top_n_dashboards_strategy(self): self.client.get(f"/superset/dashboard/{dash.id}/") strategy = TopNDashboardsStrategy(1) - result = sorted(strategy.get_urls()) - expected = sorted( - [ - f"{get_url_host()}api/v1/chart/warm_up_cache/?slice_id={slc.id}&dashboard_id={dash.id}" - for slc in dash.slices - ] - ) - self.assertEqual(result, expected) + result = strategy.get_payloads() + expected = [ + {"chart_id": chart.id, "dashboard_id": 
dash.id} for chart in dash.slices + ] + self.assertCountEqual(result, expected) def reset_tag(self, tag): """Remove associated object from tag, used to reset tests""" @@ -97,59 +94,52 @@ def reset_tag(self, tag): @pytest.mark.usefixtures( "load_unicode_dashboard_with_slice", "load_birth_names_dashboard_with_slices" ) - def test_dashboard_tags(self): + def test_dashboard_tags_strategy(self): tag1 = get_tag("tag1", db.session, TagTypes.custom) # delete first to make test idempotent self.reset_tag(tag1) strategy = DashboardTagsStrategy(["tag1"]) - result = sorted(strategy.get_urls()) + result = strategy.get_payloads() expected = [] self.assertEqual(result, expected) # tag dashboard 'births' with `tag1` tag1 = get_tag("tag1", db.session, TagTypes.custom) dash = self.get_dash_by_slug("births") - tag1_urls = sorted( - [ - f"{get_url_host()}api/v1/chart/warm_up_cache/?slice_id={slc.id}" - for slc in dash.slices - ] - ) + tag1_urls = [{"chart_id": chart.id} for chart in dash.slices] tagged_object = TaggedObject( tag_id=tag1.id, object_id=dash.id, object_type=ObjectTypes.dashboard ) db.session.add(tagged_object) db.session.commit() - self.assertEqual(sorted(strategy.get_urls()), tag1_urls) + self.assertCountEqual(strategy.get_payloads(), tag1_urls) strategy = DashboardTagsStrategy(["tag2"]) tag2 = get_tag("tag2", db.session, TagTypes.custom) self.reset_tag(tag2) - result = sorted(strategy.get_urls()) + result = strategy.get_payloads() expected = [] self.assertEqual(result, expected) # tag first slice dash = self.get_dash_by_slug("unicode-test") - slc = dash.slices[0] - tag2_urls = [ - f"{get_url_host()}api/v1/chart/warm_up_cache/?slice_id={slc.id}" - ] - object_id = slc.id + chart = dash.slices[0] + tag2_urls = [{"chart_id": chart.id}] + object_id = chart.id tagged_object = TaggedObject( tag_id=tag2.id, object_id=object_id, object_type=ObjectTypes.chart ) db.session.add(tagged_object) db.session.commit() - result = sorted(strategy.get_urls()) - self.assertEqual(result, 
tag2_urls) + result = strategy.get_payloads() + self.assertCountEqual(result, tag2_urls) strategy = DashboardTagsStrategy(["tag1", "tag2"]) - result = sorted(strategy.get_urls()) - expected = sorted(tag1_urls + tag2_urls) - self.assertEqual(result, expected) + result = strategy.get_payloads() + expected = tag1_urls + tag2_urls + self.assertCountEqual(result, expected) From 582e311f567a4f47a0f3988996448a821d41894e Mon Sep 17 00:00:00 2001 From: Jack Fragassi Date: Wed, 17 May 2023 23:22:43 -0700 Subject: [PATCH 21/31] Address comments --- UPDATING.md | 1 + superset/charts/commands/warm_up_cache.py | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/UPDATING.md b/UPDATING.md index 36f3645146ab7..80eff2ff036f7 100644 --- a/UPDATING.md +++ b/UPDATING.md @@ -24,6 +24,7 @@ assists people when migrating to a new version. ## Next +- [23853](https://github.com/apache/superset/pull/23853): Migrated endpoint `/superset/warm_up_cache` to `/api/v1/chart/warm_up_cache/`. The corresponding permission changed from `can warm_up_cache on Superset` to `can warm_up_cache on Chart`. Make sure you add/replace the necessary permissions on any custom roles you may have. - [23785](https://github.com/apache/superset/pull/23785) Deprecated the following feature flags: `CLIENT_CACHE`, `DASHBOARD_CACHE`, `DASHBOARD_FILTERS_EXPERIMENTAL`, `DASHBOARD_NATIVE_FILTERS`, `DASHBOARD_NATIVE_FILTERS_SET`, `DISABLE_DATASET_SOURCE_EDIT`, `ENABLE_EXPLORE_JSON_CSRF_PROTECTION`, `REMOVE_SLICE_LEVEL_LABEL_COLORS`. It also removed `DASHBOARD_EDIT_CHART_IN_NEW_TAB` as the feature is supported without the need for a feature flag. - [23652](https://github.com/apache/superset/pull/23652) Enables GENERIC_CHART_AXES feature flag by default. - [23226](https://github.com/apache/superset/pull/23226) Migrated endpoint `/estimate_query_cost/` to `/api/v1/sqllab/estimate/`. Corresponding permissions are can estimate query cost on SQLLab. 
Make sure you add/replace the necessary permissions on any custom roles you may have. diff --git a/superset/charts/commands/warm_up_cache.py b/superset/charts/commands/warm_up_cache.py index 5e0b2973aa81a..a87816beb649b 100644 --- a/superset/charts/commands/warm_up_cache.py +++ b/superset/charts/commands/warm_up_cache.py @@ -66,7 +66,7 @@ def run(self) -> List[Dict[str, Any]]: ) if not chart.datasource: - raise Exception("Slice's datasource does not exist") + raise Exception("Chart's datasource does not exist") obj = get_viz( datasource_type=chart.datasource.type, From b6c6f46950e33b1ebeefb3dfe7d3c37c706136a7 Mon Sep 17 00:00:00 2001 From: Jack Fragassi Date: Wed, 17 May 2023 23:23:23 -0700 Subject: [PATCH 22/31] Run black --- superset/charts/schemas.py | 1 - tests/integration_tests/charts/api_tests.py | 4 +--- 2 files changed, 1 insertion(+), 4 deletions(-) diff --git a/superset/charts/schemas.py b/superset/charts/schemas.py index cf72ed9a05189..a04c5fe12a370 100644 --- a/superset/charts/schemas.py +++ b/superset/charts/schemas.py @@ -1627,7 +1627,6 @@ class CacheWarmUpResponseSchema(Schema): ) - CHART_SCHEMAS = ( CacheWarmUpRequestSchema, CacheWarmUpResponseSchema, diff --git a/tests/integration_tests/charts/api_tests.py b/tests/integration_tests/charts/api_tests.py index d9ad01011ac53..5d526666453cc 100644 --- a/tests/integration_tests/charts/api_tests.py +++ b/tests/integration_tests/charts/api_tests.py @@ -1742,9 +1742,7 @@ def test_gets_owned_created_favorited_by_me_filter(self): def test_warm_up_cache(self): self.login() slc = self.get_slice("Girls", db.session) - rv = self.client.put( - "/api/v1/chart/warm_up_cache", json={"chart_id": slc.id} - ) + rv = self.client.put("/api/v1/chart/warm_up_cache", json={"chart_id": slc.id}) self.assertEqual(rv.status_code, 200) data = json.loads(rv.data.decode("utf-8")) From 01dd08bed0ae42edc06654996ddb2047680d8326 Mon Sep 17 00:00:00 2001 From: Jack Fragassi Date: Wed, 17 May 2023 23:38:58 -0700 Subject: [PATCH 23/31] 
Fix import order --- superset/charts/api.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/superset/charts/api.py b/superset/charts/api.py index 50ae20ab66efb..8b9d820073238 100644 --- a/superset/charts/api.py +++ b/superset/charts/api.py @@ -60,8 +60,8 @@ ChartTagFilter, ) from superset.charts.schemas import ( - CHART_SCHEMAS, CacheWarmUpRequestSchema, + CHART_SCHEMAS, ChartPostSchema, ChartPutSchema, get_delete_ids_schema, From 7779d2723a2bf7830d06e056c313b2c1d84bd567 Mon Sep 17 00:00:00 2001 From: Jack Fragassi Date: Wed, 17 May 2023 23:42:36 -0700 Subject: [PATCH 24/31] Fix permission test --- tests/integration_tests/charts/api_tests.py | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/tests/integration_tests/charts/api_tests.py b/tests/integration_tests/charts/api_tests.py index 5d526666453cc..4ad9017757f10 100644 --- a/tests/integration_tests/charts/api_tests.py +++ b/tests/integration_tests/charts/api_tests.py @@ -200,7 +200,12 @@ def test_info_security_chart(self): rv = self.get_assert_metric(uri, "info") data = json.loads(rv.data.decode("utf-8")) assert rv.status_code == 200 - assert set(data["permissions"]) == {"can_read", "can_write", "can_export"} + assert set(data["permissions"]) == { + "can_read", + "can_write", + "can_export", + "can_warm_up_cache", + } def create_chart_import(self): buf = BytesIO() From 1e7d0082b87910a35df88e32e2e62aa5a91b0e9f Mon Sep 17 00:00:00 2001 From: Jack Fragassi Date: Wed, 17 May 2023 23:51:06 -0700 Subject: [PATCH 25/31] Fix type annotations --- superset/tasks/cache.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/superset/tasks/cache.py b/superset/tasks/cache.py index 5472cb3d45c7c..31fb62f1e8fee 100644 --- a/superset/tasks/cache.py +++ b/superset/tasks/cache.py @@ -71,7 +71,7 @@ class Strategy: # pylint: disable=too-few-public-methods def __init__(self) -> None: pass - def get_payloads(self) -> List[str]: + def get_payloads(self) -> List[Dict[str, 
int]]: raise NotImplementedError("Subclasses must implement get_payloads!") @@ -93,7 +93,7 @@ class DummyStrategy(Strategy): # pylint: disable=too-few-public-methods name = "dummy" - def get_payloads(self) -> List[str]: + def get_payloads(self) -> List[Dict[str, int]]: session = db.create_scoped_session() charts = session.query(Slice).all() @@ -125,7 +125,7 @@ def __init__(self, top_n: int = 5, since: str = "7 days ago") -> None: self.top_n = top_n self.since = parse_human_datetime(since) if since else None - def get_payloads(self) -> List[str]: + def get_payloads(self) -> List[Dict[str, int]]: payloads = [] session = db.create_scoped_session() @@ -168,7 +168,7 @@ def __init__(self, tags: Optional[List[str]] = None) -> None: super().__init__() self.tags = tags or [] - def get_payloads(self) -> List[str]: + def get_payloads(self) -> List[Dict[str, int]]: payloads = [] session = db.create_scoped_session() From 4dcd2fcfe9e5cb4828156060ad3306cb5a8a9f76 Mon Sep 17 00:00:00 2001 From: Jack Fragassi Date: Fri, 9 Jun 2023 19:36:33 -0700 Subject: [PATCH 26/31] Split into two endpoints/commands --- superset/charts/api.py | 25 ++-- superset/charts/commands/exceptions.py | 13 -- superset/charts/commands/warm_up_cache.py | 108 ++++++---------- superset/charts/schemas.py | 21 ++-- superset/datasets/api.py | 65 ++++++++++ superset/datasets/commands/exceptions.py | 5 + superset/datasets/commands/warm_up_cache.py | 69 +++++++++++ superset/datasets/schemas.py | 40 ++++++ tests/integration_tests/charts/api_tests.py | 31 +---- .../charts/commands_tests.py | 30 ++--- tests/integration_tests/datasets/api_tests.py | 115 ++++++++++++++++++ .../datasets/commands_tests.py | 32 +++++ 12 files changed, 397 insertions(+), 157 deletions(-) create mode 100644 superset/datasets/commands/warm_up_cache.py diff --git a/superset/charts/api.py b/superset/charts/api.py index 86ca9fac8b826..c3f46196ae91d 100644 --- a/superset/charts/api.py +++ b/superset/charts/api.py @@ -47,7 +47,7 @@ from 
superset.charts.commands.export import ExportChartsCommand from superset.charts.commands.importers.dispatcher import ImportChartsCommand from superset.charts.commands.update import UpdateChartCommand -from superset.charts.commands.warm_up_cache import WarmUpCacheCommand +from superset.charts.commands.warm_up_cache import ChartWarmUpCacheCommand from superset.charts.dao import ChartDAO from superset.charts.filters import ( ChartAllTextFilter, @@ -60,7 +60,7 @@ ChartTagFilter, ) from superset.charts.schemas import ( - CacheWarmUpRequestSchema, + ChartCacheWarmUpRequestSchema, CHART_SCHEMAS, ChartPostSchema, ChartPutSchema, @@ -962,22 +962,21 @@ def warm_up_cache(self) -> Response: --- put: summary: >- - Warms up the cache for the slice or table + Warms up the cache for the chart description: >- - Warms up the cache for the slice or table. + Warms up the cache for the chart. Note for slices a force refresh occurs. In terms of the `extra_filters` these can be obtained from records in the JSON encoded `logs.json` column associated with the `explore_json` action. requestBody: description: >- - Identifies charts to warm up cache for, and any additional dashboard or - filter context to use. Either a chart id or a table name and a database - name can be passed. + Identifies the chart to warm up cache for, and any additional dashboard or + filter context to use. 
required: true content: application/json: schema: - $ref: "#/components/schemas/CacheWarmUpRequestSchema" + $ref: "#/components/schemas/ChartCacheWarmUpRequestSchema" responses: 200: description: Each chart's warmup status @@ -993,18 +992,16 @@ def warm_up_cache(self) -> Response: $ref: '#/components/responses/500' """ try: - body = CacheWarmUpRequestSchema().load(request.json) + body = ChartCacheWarmUpRequestSchema().load(request.json) except ValidationError as error: return self.response_400(message=error.messages) try: - payload = WarmUpCacheCommand( - body.get("chart_id"), + result = ChartWarmUpCacheCommand( + body["chart_id"], body.get("dashboard_id"), - body.get("table_name"), - body.get("db_name"), body.get("extra_filters"), ).run() - return self.response(200, result=payload) + return self.response(200, result=[result]) except CommandException as ex: return self.response(ex.status, message=ex.message) diff --git a/superset/charts/commands/exceptions.py b/superset/charts/commands/exceptions.py index 7de1999646e51..1079cdca81c90 100644 --- a/superset/charts/commands/exceptions.py +++ b/superset/charts/commands/exceptions.py @@ -155,19 +155,6 @@ class ChartImportError(ImportFailedError): message = _("Import chart failed for an unknown reason") -class WarmUpCacheParametersExpectedError(CommandException): - status = 400 - message = _( - "Malformed request. slice_id or table_name " - "and db_name arguments are expected" - ) - - class WarmUpCacheChartNotFoundError(CommandException): status = 404 message = _("Chart not found") - - -class WarmUpCacheTableNotFoundError(CommandException): - status = 404 - message = _("The provided table was not found in the provided database") diff --git a/superset/charts/commands/warm_up_cache.py b/superset/charts/commands/warm_up_cache.py index a87816beb649b..1ba03ef127be7 100644 --- a/superset/charts/commands/warm_up_cache.py +++ b/superset/charts/commands/warm_up_cache.py @@ -16,101 +16,69 @@ # under the License. 
-from typing import Any, Dict, List, Optional +from typing import Any, Optional, Union import simplejson as json from flask import g -from superset.charts.commands.exceptions import ( - WarmUpCacheChartNotFoundError, - WarmUpCacheParametersExpectedError, - WarmUpCacheTableNotFoundError, -) +from superset.charts.commands.exceptions import WarmUpCacheChartNotFoundError from superset.commands.base import BaseCommand -from superset.connectors.sqla.models import SqlaTable from superset.extensions import db -from superset.models.core import Database from superset.models.slice import Slice from superset.utils.core import error_msg_from_exception from superset.views.utils import get_dashboard_extra_filters, get_form_data, get_viz -class WarmUpCacheCommand(BaseCommand): +class ChartWarmUpCacheCommand(BaseCommand): # pylint: disable=too-many-arguments def __init__( self, - chart_id: Optional[int], + chart_or_id: Union[int, Slice], dashboard_id: Optional[int], - table_name: Optional[str], - db_name: Optional[str], extra_filters: Optional[str], ): - self._chart_id = chart_id + self._chart_or_id = chart_or_id self._dashboard_id = dashboard_id - self._table_name = table_name - self._db_name = db_name self._extra_filters = extra_filters - self._charts: List[Slice] = [] - def run(self) -> List[Dict[str, Any]]: + def run(self) -> dict[str, Any]: self.validate() - result: List[Dict[str, Any]] = [] - for chart in self._charts: - try: - form_data = get_form_data(chart.id, use_slice_data=True)[0] - if self._dashboard_id: - form_data["extra_filters"] = ( - json.loads(self._extra_filters) - if self._extra_filters - else get_dashboard_extra_filters(chart.id, self._dashboard_id) - ) - - if not chart.datasource: - raise Exception("Chart's datasource does not exist") - - obj = get_viz( - datasource_type=chart.datasource.type, - datasource_id=chart.datasource.id, - form_data=form_data, - force=True, + chart: Slice = self._chart_or_id + try: + form_data = get_form_data(chart.id, 
use_slice_data=True)[0] + if self._dashboard_id: + form_data["extra_filters"] = ( + json.loads(self._extra_filters) + if self._extra_filters + else get_dashboard_extra_filters(chart.id, self._dashboard_id) ) - # pylint: disable=assigning-non-slot - g.form_data = form_data - payload = obj.get_payload() - delattr(g, "form_data") - error = payload["errors"] or None - status = payload["status"] - except Exception as ex: # pylint: disable=broad-except - error = error_msg_from_exception(ex) - status = None + if not chart.datasource: + raise Exception("Chart's datasource does not exist") - result.append( - {"chart_id": chart.id, "viz_error": error, "viz_status": status} + obj = get_viz( + datasource_type=chart.datasource.type, + datasource_id=chart.datasource.id, + form_data=form_data, + force=True, ) - return result + # pylint: disable=assigning-non-slot + g.form_data = form_data + payload = obj.get_payload() + delattr(g, "form_data") + error = payload["errors"] or None + status = payload["status"] + except Exception as ex: # pylint: disable=broad-except + error = error_msg_from_exception(ex) + status = None + + return {"chart_id": chart.id, "viz_error": error, "viz_status": status} def validate(self) -> None: - if not self._chart_id and not (self._table_name and self._db_name): - raise WarmUpCacheParametersExpectedError() - if self._chart_id: - self._charts = db.session.query(Slice).filter_by(id=self._chart_id).all() - if not self._charts: - raise WarmUpCacheChartNotFoundError() - elif self._table_name and self._db_name: - table = ( - db.session.query(SqlaTable) - .join(Database) - .filter( - Database.database_name == self._db_name, - SqlaTable.table_name == self._table_name, - ) - ).one_or_none() - if not table: - raise WarmUpCacheTableNotFoundError() - self._charts = ( - db.session.query(Slice) - .filter_by(datasource_id=table.id, datasource_type=table.type) - .all() - ) + if isinstance(self._chart_or_id, Slice): + return + chart = 
db.session.query(Slice).filter_by(id=self._chart_or_id).scalar() + if not chart: + raise WarmUpCacheChartNotFoundError() + self._chart_or_id = chart diff --git a/superset/charts/schemas.py b/superset/charts/schemas.py index 840d7e17f8053..f3958ab8feebf 100644 --- a/superset/charts/schemas.py +++ b/superset/charts/schemas.py @@ -1557,27 +1557,22 @@ class ImportV1ChartSchema(Schema): external_url = fields.String(allow_none=True) -class CacheWarmUpRequestSchema(Schema): +class ChartCacheWarmUpRequestSchema(Schema): chart_id = fields.Integer( - metadata={"description": "The ID of the chart to warm up cache for"} + required=True, + metadata={"description": "The ID of the chart to warm up cache for"}, ) dashboard_id = fields.Integer( metadata={ "description": "The ID of the dashboard to get filters for when warming cache" } ) - table_name = fields.String( - metadata={"description": "The name of the table to warm up cache for"} - ) - db_name = fields.String( - metadata={"description": "The name of the database where the table is located"} - ) extra_filters = fields.String( metadata={"description": "Extra filters to apply when warming up cache"} ) -class CacheWarmUpResponseSingleSchema(Schema): +class ChartCacheWarmUpResponseSingleSchema(Schema): chart_id = fields.Integer( metadata={"description": "The ID of the chart the status belongs to"} ) @@ -1589,9 +1584,9 @@ class CacheWarmUpResponseSingleSchema(Schema): ) -class CacheWarmUpResponseSchema(Schema): +class ChartCacheWarmUpResponseSchema(Schema): result = fields.List( - fields.Nested(CacheWarmUpResponseSingleSchema), + fields.Nested(ChartCacheWarmUpResponseSingleSchema), metadata={ "description": "A list of each chart's warmup status and errors if any" }, @@ -1599,8 +1594,8 @@ class CacheWarmUpResponseSchema(Schema): CHART_SCHEMAS = ( - CacheWarmUpRequestSchema, - CacheWarmUpResponseSchema, + ChartCacheWarmUpRequestSchema, + ChartCacheWarmUpResponseSchema, ChartDataQueryContextSchema, ChartDataResponseSchema, 
ChartDataAsyncResponseSchema, diff --git a/superset/datasets/api.py b/superset/datasets/api.py index 6568ba379367c..3c228733715c9 100644 --- a/superset/datasets/api.py +++ b/superset/datasets/api.py @@ -29,6 +29,7 @@ from marshmallow import ValidationError from superset import event_logger, is_feature_enabled +from superset.commands.exceptions import CommandException from superset.commands.importers.exceptions import NoValidFilesFoundError from superset.commands.importers.v1.utils import get_contents_from_bundle from superset.connectors.sqla.models import SqlaTable @@ -52,9 +53,12 @@ from superset.datasets.commands.importers.dispatcher import ImportDatasetsCommand from superset.datasets.commands.refresh import RefreshDatasetCommand from superset.datasets.commands.update import UpdateDatasetCommand +from superset.datasets.commands.warm_up_cache import DatasetWarmUpCacheCommand from superset.datasets.dao import DatasetDAO from superset.datasets.filters import DatasetCertifiedFilter, DatasetIsNullOrEmptyFilter from superset.datasets.schemas import ( + DatasetCacheWarmUpRequestSchema, + DatasetCacheWarmUpResponseSchema, DatasetDuplicateSchema, DatasetPostSchema, DatasetPutSchema, @@ -95,6 +99,7 @@ class DatasetRestApi(BaseSupersetModelRestApi): "related_objects", "duplicate", "get_or_create_dataset", + "warm_up_cache", } list_columns = [ "id", @@ -246,6 +251,8 @@ class DatasetRestApi(BaseSupersetModelRestApi): "get_export_ids_schema": get_export_ids_schema, } openapi_spec_component_schemas = ( + DatasetCacheWarmUpRequestSchema, + DatasetCacheWarmUpResponseSchema, DatasetRelatedObjectsResponse, DatasetDuplicateSchema, GetOrCreateDatasetSchema, @@ -994,3 +1001,61 @@ def get_or_create_dataset(self) -> Response: exc_info=True, ) return self.response_422(message=ex.message) + + @expose("/warm_up_cache", methods=("PUT",)) + @protect() + @safe + @statsd_metrics + @event_logger.log_this_with_context( + action=lambda self, *args, **kwargs: f"{self.__class__.__name__}" + 
f".warm_up_cache", + log_to_statsd=False, + ) + def warm_up_cache(self) -> Response: + """ + --- + put: + summary: >- + Warms up the cache for each chart powered by the given table + description: >- + Warms up the cache for the table. + Note for slices a force refresh occurs. + In terms of the `extra_filters` these can be obtained from records in the JSON + encoded `logs.json` column associated with the `explore_json` action. + requestBody: + description: >- + Identifies the database and table to warm up cache for, and any + additional dashboard or filter context to use. + required: true + content: + application/json: + schema: + $ref: "#/components/schemas/DatasetCacheWarmUpRequestSchema" + responses: + 200: + description: Each chart's warmup status + content: + application/json: + schema: + $ref: "#/components/schemas/DatasetCacheWarmUpResponseSchema" + 400: + $ref: '#/components/responses/400' + 404: + $ref: '#/components/responses/404' + 500: + $ref: '#/components/responses/500' + """ + try: + body = DatasetCacheWarmUpRequestSchema().load(request.json) + except ValidationError as error: + return self.response_400(message=error.messages) + try: + result = DatasetWarmUpCacheCommand( + body["db_name"], + body["table_name"], + body.get("dashboard_id"), + body.get("extra_filters"), + ).run() + return self.response(200, result=result) + except CommandException as ex: + return self.response(ex.status, message=ex.message) diff --git a/superset/datasets/commands/exceptions.py b/superset/datasets/commands/exceptions.py index e06e92802f04c..7c6ef86634f3a 100644 --- a/superset/datasets/commands/exceptions.py +++ b/superset/datasets/commands/exceptions.py @@ -212,3 +212,8 @@ class DatasetDuplicateFailedError(CreateFailedError): class DatasetForbiddenDataURI(ImportFailedError): message = _("Data URI is not allowed.") + + +class WarmUpCacheTableNotFoundError(CommandException): + status = 404 + message = _("The provided table was not found in the provided database") diff --git 
a/superset/datasets/commands/warm_up_cache.py b/superset/datasets/commands/warm_up_cache.py new file mode 100644 index 0000000000000..62044e7224f36 --- /dev/null +++ b/superset/datasets/commands/warm_up_cache.py @@ -0,0 +1,69 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+ + +from typing import Any, Optional + +from superset.charts.commands.warm_up_cache import ChartWarmUpCacheCommand +from superset.commands.base import BaseCommand +from superset.connectors.sqla.models import SqlaTable +from superset.datasets.commands.exceptions import WarmUpCacheTableNotFoundError +from superset.extensions import db +from superset.models.core import Database +from superset.models.slice import Slice + + +class DatasetWarmUpCacheCommand(BaseCommand): + # pylint: disable=too-many-arguments + def __init__( + self, + db_name: str, + table_name: str, + dashboard_id: Optional[int], + extra_filters: Optional[str], + ): + self._db_name = db_name + self._table_name = table_name + self._dashboard_id = dashboard_id + self._extra_filters = extra_filters + self._charts: list[Slice] = [] + + def run(self) -> list[dict[str, Any]]: + self.validate() + return [ + ChartWarmUpCacheCommand( + chart, self._dashboard_id, self._extra_filters + ).run() + for chart in self._charts + ] + + def validate(self) -> None: + table = ( + db.session.query(SqlaTable) + .join(Database) + .filter( + Database.database_name == self._db_name, + SqlaTable.table_name == self._table_name, + ) + ).one_or_none() + if not table: + raise WarmUpCacheTableNotFoundError() + self._charts = ( + db.session.query(Slice) + .filter_by(datasource_id=table.id, datasource_type=table.type) + .all() + ) diff --git a/superset/datasets/schemas.py b/superset/datasets/schemas.py index 9a2af980666e8..f95897ce59b15 100644 --- a/superset/datasets/schemas.py +++ b/superset/datasets/schemas.py @@ -254,3 +254,43 @@ class Meta: # pylint: disable=too-few-public-methods model = Dataset load_instance = True include_relationships = True + + +class DatasetCacheWarmUpRequestSchema(Schema): + db_name = fields.String( + required=True, + metadata={"description": "The name of the database where the table is located"}, + ) + table_name = fields.String( + required=True, + metadata={"description": "The name of the table to warm up 
cache for"}, + ) + dashboard_id = fields.Integer( + metadata={ + "description": "The ID of the dashboard to get filters for when warming cache" + } + ) + extra_filters = fields.String( + metadata={"description": "Extra filters to apply when warming up cache"} + ) + + +class DatasetCacheWarmUpResponseSingleSchema(Schema): + chart_id = fields.Integer( + metadata={"description": "The ID of the chart the status belongs to"} + ) + viz_error = fields.String( + metadata={"description": "Error that occurred when warming cache for chart"} + ) + viz_status = fields.String( + metadata={"description": "Status of the underlying query for the viz"} + ) + + +class DatasetCacheWarmUpResponseSchema(Schema): + result = fields.List( + fields.Nested(DatasetCacheWarmUpResponseSingleSchema), + metadata={ + "description": "A list of each chart's warmup status and errors if any" + }, + ) diff --git a/tests/integration_tests/charts/api_tests.py b/tests/integration_tests/charts/api_tests.py index f8e543467603b..b0f8d1cc7953b 100644 --- a/tests/integration_tests/charts/api_tests.py +++ b/tests/integration_tests/charts/api_tests.py @@ -1755,18 +1755,6 @@ def test_warm_up_cache(self): [{"chart_id": slc.id, "viz_error": None, "viz_status": "success"}], ) - rv = self.client.put( - "/api/v1/chart/warm_up_cache", - json={ - "table_name": "energy_usage", - "db_name": get_example_database().database_name, - }, - ) - self.assertEqual(rv.status_code, 200) - data = json.loads(rv.data.decode("utf-8")) - - assert len(data["result"]) > 0 - dashboard = self.get_dash_by_slug("births") rv = self.client.put( @@ -1797,16 +1785,14 @@ def test_warm_up_cache(self): [{"chart_id": slc.id, "viz_error": None, "viz_status": "success"}], ) - def test_warm_up_cache_required_params_missing(self): + def test_warm_up_cache_chart_id_required(self): self.login() rv = self.client.put("/api/v1/chart/warm_up_cache", json={"dashboard_id": 1}) self.assertEqual(rv.status_code, 400) data = json.loads(rv.data.decode("utf-8")) 
self.assertEqual( data, - { - "message": "Malformed request. slice_id or table_name and db_name arguments are expected" - }, + {"message": {"chart_id": ["Missing data for required field."]}}, ) def test_warm_up_cache_chart_not_found(self): @@ -1816,19 +1802,6 @@ def test_warm_up_cache_chart_not_found(self): data = json.loads(rv.data.decode("utf-8")) self.assertEqual(data, {"message": "Chart not found"}) - def test_warm_up_cache_table_not_found(self): - self.login() - rv = self.client.put( - "/api/v1/chart/warm_up_cache", - json={"table_name": "not_here", "db_name": "abc"}, - ) - self.assertEqual(rv.status_code, 404) - data = json.loads(rv.data.decode("utf-8")) - self.assertEqual( - data, - {"message": "The provided table was not found in the provided database"}, - ) - def test_warm_up_cache_payload_validation(self): self.login() rv = self.client.put( diff --git a/tests/integration_tests/charts/commands_tests.py b/tests/integration_tests/charts/commands_tests.py index 88b3acc28150d..217b1655a5f05 100644 --- a/tests/integration_tests/charts/commands_tests.py +++ b/tests/integration_tests/charts/commands_tests.py @@ -26,13 +26,11 @@ from superset.charts.commands.exceptions import ( ChartNotFoundError, WarmUpCacheChartNotFoundError, - WarmUpCacheParametersExpectedError, - WarmUpCacheTableNotFoundError, ) from superset.charts.commands.export import ExportChartsCommand from superset.charts.commands.importers.v1 import ImportChartsCommand from superset.charts.commands.update import UpdateChartCommand -from superset.charts.commands.warm_up_cache import WarmUpCacheCommand +from superset.charts.commands.warm_up_cache import ChartWarmUpCacheCommand from superset.commands.exceptions import CommandInvalidError from superset.commands.importers.exceptions import IncorrectVersionError from superset.connectors.sqla.models import SqlaTable @@ -454,25 +452,21 @@ def test_query_context_update_command(self, mock_sm_g, mock_g): assert chart.owners[0] == admin -class 
TestWarmUpCacheCommand(SupersetTestCase): - def test_warm_up_cache_command_required_params_missing(self): - with self.assertRaises(WarmUpCacheParametersExpectedError): - WarmUpCacheCommand(None, 1, None, None, None).run() - +class TestChartWarmUpCacheCommand(SupersetTestCase): def test_warm_up_cache_command_chart_not_found(self): with self.assertRaises(WarmUpCacheChartNotFoundError): - WarmUpCacheCommand(99999, None, None, None, None).run() - - def test_warm_up_cache_command_table_not_found(self): - with self.assertRaises(WarmUpCacheTableNotFoundError): - WarmUpCacheCommand(None, None, "not_here", "abc", None).run() + ChartWarmUpCacheCommand(99999, None, None).run() - @pytest.mark.usefixtures( - "load_energy_table_with_slice", "load_birth_names_dashboard_with_slices" - ) + @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices") def test_warm_up_cache(self): slc = self.get_slice("Girls", db.session) - result = WarmUpCacheCommand(slc.id, None, None, None, None).run() + result = ChartWarmUpCacheCommand(slc.id, None, None).run() + self.assertEqual( + result, {"chart_id": slc.id, "viz_error": None, "viz_status": "success"} + ) + + # can just pass in chart as well + result = ChartWarmUpCacheCommand(slc, None, None).run() self.assertEqual( - result, [{"chart_id": slc.id, "viz_error": None, "viz_status": "success"}] + result, {"chart_id": slc.id, "viz_error": None, "viz_status": "success"} ) diff --git a/tests/integration_tests/datasets/api_tests.py b/tests/integration_tests/datasets/api_tests.py index 6c99efd358c93..c6dcabacbc813 100644 --- a/tests/integration_tests/datasets/api_tests.py +++ b/tests/integration_tests/datasets/api_tests.py @@ -39,6 +39,7 @@ from superset.datasets.models import Dataset from superset.extensions import db, security_manager from superset.models.core import Database +from superset.models.slice import Slice from superset.utils.core import backend, get_example_default_schema from superset.utils.database import get_example_database, 
get_main_database from superset.utils.dict_import_export import export_to_dict @@ -2556,3 +2557,117 @@ def test_get_or_create_dataset_creates_table(self): with examples_db.get_sqla_engine_with_context() as engine: engine.execute("DROP TABLE test_create_sqla_table_api") db.session.commit() + + @pytest.mark.usefixtures( + "load_energy_table_with_slice", "load_birth_names_dashboard_with_slices" + ) + def test_warm_up_cache(self): + """ + Dataset API: Test warm up cache endpoint + """ + self.login() + energy_table = self.get_energy_usage_dataset() + energy_charts = ( + db.session.query(Slice) + .filter( + Slice.datasource_id == energy_table.id, Slice.datasource_type == "table" + ) + .all() + ) + rv = self.client.put( + "/api/v1/dataset/warm_up_cache", + json={ + "table_name": "energy_usage", + "db_name": get_example_database().database_name, + }, + ) + self.assertEqual(rv.status_code, 200) + data = json.loads(rv.data.decode("utf-8")) + self.assertEqual( + len(data["result"]), + len(energy_charts), + ) + for chart_result in data["result"]: + assert "chart_id" in chart_result + assert "viz_error" in chart_result + assert "viz_status" in chart_result + + # With dashboard id + dashboard = self.get_dash_by_slug("births") + birth_table = self.get_birth_names_dataset() + birth_charts = ( + db.session.query(Slice) + .filter( + Slice.datasource_id == birth_table.id, Slice.datasource_type == "table" + ) + .all() + ) + rv = self.client.put( + "/api/v1/dataset/warm_up_cache", + json={ + "table_name": "birth_names", + "db_name": get_example_database().database_name, + "dashboard_id": dashboard.id, + }, + ) + self.assertEqual(rv.status_code, 200) + data = json.loads(rv.data.decode("utf-8")) + self.assertEqual( + len(data["result"]), + len(birth_charts), + ) + for chart_result in data["result"]: + assert "chart_id" in chart_result + assert "viz_error" in chart_result + assert "viz_status" in chart_result + + # With extra filters + rv = self.client.put( + 
"/api/v1/dataset/warm_up_cache", + json={ + "table_name": "birth_names", + "db_name": get_example_database().database_name, + "dashboard_id": dashboard.id, + "extra_filters": json.dumps( + [{"col": "name", "op": "in", "val": ["Jennifer"]}] + ), + }, + ) + self.assertEqual(rv.status_code, 200) + data = json.loads(rv.data.decode("utf-8")) + self.assertEqual( + len(data["result"]), + len(birth_charts), + ) + for chart_result in data["result"]: + assert "chart_id" in chart_result + assert "viz_error" in chart_result + assert "viz_status" in chart_result + + def test_warm_up_cache_db_and_table_name_required(self): + self.login() + rv = self.client.put("/api/v1/dataset/warm_up_cache", json={"dashboard_id": 1}) + self.assertEqual(rv.status_code, 400) + data = json.loads(rv.data.decode("utf-8")) + self.assertEqual( + data, + { + "message": { + "db_name": ["Missing data for required field."], + "table_name": ["Missing data for required field."], + } + }, + ) + + def test_warm_up_cache_table_not_found(self): + self.login() + rv = self.client.put( + "/api/v1/dataset/warm_up_cache", + json={"table_name": "not_here", "db_name": "abc"}, + ) + self.assertEqual(rv.status_code, 404) + data = json.loads(rv.data.decode("utf-8")) + self.assertEqual( + data, + {"message": "The provided table was not found in the provided database"}, + ) diff --git a/tests/integration_tests/datasets/commands_tests.py b/tests/integration_tests/datasets/commands_tests.py index 953c34059fdd8..34a0625b36926 100644 --- a/tests/integration_tests/datasets/commands_tests.py +++ b/tests/integration_tests/datasets/commands_tests.py @@ -31,13 +31,20 @@ from superset.datasets.commands.exceptions import ( DatasetInvalidError, DatasetNotFoundError, + WarmUpCacheTableNotFoundError, ) from superset.datasets.commands.export import ExportDatasetsCommand from superset.datasets.commands.importers import v0, v1 +from superset.datasets.commands.warm_up_cache import DatasetWarmUpCacheCommand from superset.models.core import 
Database +from superset.models.slice import Slice from superset.utils.core import get_example_default_schema from superset.utils.database import get_example_database from tests.integration_tests.base_tests import SupersetTestCase +from tests.integration_tests.fixtures.birth_names_dashboard import ( + load_birth_names_dashboard_with_slices, + load_birth_names_data, +) from tests.integration_tests.fixtures.energy_dashboard import ( load_energy_table_data, load_energy_table_with_slice, @@ -575,3 +582,28 @@ def test_create_dataset_command(self, mock_g, mock_g2): with examples_db.get_sqla_engine_with_context() as engine: engine.execute("DROP TABLE test_create_dataset_command") db.session.commit() + + +class TestDatasetWarmUpCacheCommand(SupersetTestCase): + def test_warm_up_cache_command_table_not_found(self): + with self.assertRaises(WarmUpCacheTableNotFoundError): + DatasetWarmUpCacheCommand("not", "here", None, None).run() + + @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices") + def test_warm_up_cache(self): + birth_table = self.get_birth_names_dataset() + birth_charts = ( + db.session.query(Slice) + .filter( + Slice.datasource_id == birth_table.id, Slice.datasource_type == "table" + ) + .all() + ) + results = DatasetWarmUpCacheCommand( + get_example_database().database_name, "birth_names", None, None + ).run() + self.assertEqual(len(results), len(birth_charts)) + for chart_result in results: + assert "chart_id" in chart_result + assert "viz_error" in chart_result + assert "viz_status" in chart_result From 0d3f58aaea483e75ddfd0212e41ff3964fc18b3d Mon Sep 17 00:00:00 2001 From: Jack Fragassi Date: Fri, 9 Jun 2023 19:42:09 -0700 Subject: [PATCH 27/31] Dict -> dict --- superset/tasks/cache.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/superset/tasks/cache.py b/superset/tasks/cache.py index ad45e00d0fe8c..68b5657a22545 100644 --- a/superset/tasks/cache.py +++ b/superset/tasks/cache.py @@ -37,7 +37,7 @@ logger.setLevel(logging.INFO) 
-def get_payload(chart: Slice, dashboard: Optional[Dashboard] = None) -> Dict[str, int]: +def get_payload(chart: Slice, dashboard: Optional[Dashboard] = None) -> dict[str, int]: """Return payload for warming up a given chart/table cache.""" payload = {"chart_id": chart.id} if dashboard: From 13d152947086a2bbdda7e217ba2f159d51d4046d Mon Sep 17 00:00:00 2001 From: Jack Fragassi Date: Fri, 9 Jun 2023 19:47:35 -0700 Subject: [PATCH 28/31] Fix openapi schema ref --- superset/charts/api.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/superset/charts/api.py b/superset/charts/api.py index c3f46196ae91d..9fa5a56cedd3f 100644 --- a/superset/charts/api.py +++ b/superset/charts/api.py @@ -983,7 +983,7 @@ def warm_up_cache(self) -> Response: content: application/json: schema: - $ref: "#/components/schemas/CacheWarmUpResponseSchema" + $ref: "#/components/schemas/ChartCacheWarmUpResponseSchema" 400: $ref: '#/components/responses/400' 404: From c9bc42f70d04579fed9921a49b823376e611115f Mon Sep 17 00:00:00 2001 From: Jack Fragassi Date: Fri, 9 Jun 2023 19:58:34 -0700 Subject: [PATCH 29/31] Fix lint --- superset/charts/commands/warm_up_cache.py | 2 +- superset/datasets/api.py | 1 + 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/superset/charts/commands/warm_up_cache.py b/superset/charts/commands/warm_up_cache.py index 1ba03ef127be7..6fe9f94ffa620 100644 --- a/superset/charts/commands/warm_up_cache.py +++ b/superset/charts/commands/warm_up_cache.py @@ -43,7 +43,7 @@ def __init__( def run(self) -> dict[str, Any]: self.validate() - chart: Slice = self._chart_or_id + chart: Slice = self._chart_or_id # type: ignore try: form_data = get_form_data(chart.id, use_slice_data=True)[0] if self._dashboard_id: diff --git a/superset/datasets/api.py b/superset/datasets/api.py index 3c228733715c9..fa5173458a43d 100644 --- a/superset/datasets/api.py +++ b/superset/datasets/api.py @@ -14,6 +14,7 @@ # KIND, either express or implied. 
See the License for the # specific language governing permissions and limitations # under the License. +# pylint: disable=too-many-lines import json import logging from datetime import datetime From dff0d4f5ceaae48fb0f6cdde53e11b22d01494d8 Mon Sep 17 00:00:00 2001 From: Jack Fragassi Date: Fri, 9 Jun 2023 20:26:44 -0700 Subject: [PATCH 30/31] Fix test --- superset/charts/api.py | 2 +- tests/integration_tests/charts/api_tests.py | 7 ++++--- 2 files changed, 5 insertions(+), 4 deletions(-) diff --git a/superset/charts/api.py b/superset/charts/api.py index 9fa5a56cedd3f..48e0856c85496 100644 --- a/superset/charts/api.py +++ b/superset/charts/api.py @@ -60,8 +60,8 @@ ChartTagFilter, ) from superset.charts.schemas import ( - ChartCacheWarmUpRequestSchema, CHART_SCHEMAS, + ChartCacheWarmUpRequestSchema, ChartPostSchema, ChartPutSchema, get_delete_ids_schema, diff --git a/tests/integration_tests/charts/api_tests.py b/tests/integration_tests/charts/api_tests.py index b0f8d1cc7953b..53faa4b3a048e 100644 --- a/tests/integration_tests/charts/api_tests.py +++ b/tests/integration_tests/charts/api_tests.py @@ -1806,17 +1806,18 @@ def test_warm_up_cache_payload_validation(self): self.login() rv = self.client.put( "/api/v1/chart/warm_up_cache", - json={"chart_id": "id", "table_name": 2, "db_name": 4}, + json={"chart_id": "id", "dashboard_id": "id", "extra_filters": 4}, ) self.assertEqual(rv.status_code, 400) data = json.loads(rv.data.decode("utf-8")) + print(data) self.assertEqual( data, { "message": { "chart_id": ["Not a valid integer."], - "db_name": ["Not a valid string."], - "table_name": ["Not a valid string."], + "dashboard_id": ["Not a valid integer."], + "extra_filters": ["Not a valid string."], } }, ) From bcb88d159e1d33aa28f1a4d95402eb45302c6f48 Mon Sep 17 00:00:00 2001 From: Jack Fragassi Date: Fri, 9 Jun 2023 21:11:14 -0700 Subject: [PATCH 31/31] Fix security test --- tests/integration_tests/datasets/api_tests.py | 1 + 1 file changed, 1 insertion(+) diff --git 
a/tests/integration_tests/datasets/api_tests.py b/tests/integration_tests/datasets/api_tests.py index c6dcabacbc813..508dd29788b72 100644 --- a/tests/integration_tests/datasets/api_tests.py +++ b/tests/integration_tests/datasets/api_tests.py @@ -516,6 +516,7 @@ def test_info_security_dataset(self): "can_export", "can_duplicate", "can_get_or_create_dataset", + "can_warm_up_cache", } def test_create_dataset_item(self):