From 94e245d5ebe8e5dd6b3a339f9a275e0fb26855d9 Mon Sep 17 00:00:00 2001 From: "Hugh A. Miles II" Date: Thu, 24 Feb 2022 09:24:33 -0800 Subject: [PATCH 01/37] add force condition for exporting csv (#18911) --- .../src/dashboard/components/gridComponents/Chart.jsx | 1 + 1 file changed, 1 insertion(+) diff --git a/superset-frontend/src/dashboard/components/gridComponents/Chart.jsx b/superset-frontend/src/dashboard/components/gridComponents/Chart.jsx index d434155df105f..0137349ed9d83 100644 --- a/superset-frontend/src/dashboard/components/gridComponents/Chart.jsx +++ b/superset-frontend/src/dashboard/components/gridComponents/Chart.jsx @@ -274,6 +274,7 @@ export default class Chart extends React.Component { : this.props.formData, resultType: 'full', resultFormat: 'csv', + force: true, }); } From da3bc48803e2653be127f948e259a762fa0f9718 Mon Sep 17 00:00:00 2001 From: David Aaron Suddjian <1858430+suddjian@users.noreply.github.com> Date: Thu, 24 Feb 2022 10:09:41 -0800 Subject: [PATCH 02/37] feat: On window focus, redirect to login if the user has been logged out (#18773) * /me api * test it * watch for window activation and check auth * simplify * more comment * making ci happy * mypy should ignore tests --- .../src/dashboard/util/findPermission.test.ts | 1 + superset-frontend/src/preamble.ts | 30 +++++++++- .../src/profile/components/fixtures.tsx | 1 + superset-frontend/src/types/bootstrapTypes.ts | 1 + superset/initialization/__init__.py | 2 + superset/users/api.py | 56 +++++++++++++++++++ superset/users/schemas.py | 28 ++++++++++ superset/views/utils.py | 1 + tests/integration_tests/security_tests.py | 1 + tests/integration_tests/users/api_tests.py | 49 ++++++++++++++++ 10 files changed, 168 insertions(+), 2 deletions(-) create mode 100644 superset/users/api.py create mode 100644 superset/users/schemas.py create mode 100644 tests/integration_tests/users/api_tests.py diff --git a/superset-frontend/src/dashboard/util/findPermission.test.ts 
b/superset-frontend/src/dashboard/util/findPermission.test.ts index f90c2800f4a37..8930549f4a7e9 100644 --- a/superset-frontend/src/dashboard/util/findPermission.test.ts +++ b/superset-frontend/src/dashboard/util/findPermission.test.ts @@ -75,6 +75,7 @@ describe('canUserEditDashboard', () => { email: 'user@example.com', firstName: 'Test', isActive: true, + isAnonymous: false, lastName: 'User', userId: 1, username: 'owner', diff --git a/superset-frontend/src/preamble.ts b/superset-frontend/src/preamble.ts index 0547e8a6abcca..7bd6dbe29b365 100644 --- a/superset-frontend/src/preamble.ts +++ b/superset-frontend/src/preamble.ts @@ -20,19 +20,24 @@ import { setConfig as setHotLoaderConfig } from 'react-hot-loader'; import 'abortcontroller-polyfill/dist/abortcontroller-polyfill-only'; import moment from 'moment'; // eslint-disable-next-line no-restricted-imports -import { configure, supersetTheme } from '@superset-ui/core'; +import { configure, makeApi, supersetTheme } from '@superset-ui/core'; import { merge } from 'lodash'; import setupClient from './setup/setupClient'; import setupColors from './setup/setupColors'; import setupFormatters from './setup/setupFormatters'; import setupDashboardComponents from './setup/setupDasboardComponents'; +import { User } from './types/bootstrapTypes'; if (process.env.WEBPACK_MODE === 'development') { setHotLoaderConfig({ logLevel: 'debug', trackTailUpdates: false }); } // eslint-disable-next-line import/no-mutable-exports -export let bootstrapData: any; +export let bootstrapData: { + user?: User | undefined; + common?: any; + config?: any; +} = {}; // Configure translation if (typeof window !== 'undefined') { const root = document.getElementById('app'); @@ -67,3 +72,24 @@ export const theme = merge( supersetTheme, bootstrapData?.common?.theme_overrides ?? {}, ); + +const getMe = makeApi({ + method: 'GET', + endpoint: '/api/v1/me/', +}); + +/** + * When you re-open the window, we check if you are still logged in. 
+ * If your session expired or you signed out, we'll redirect to login. + * If you aren't logged in in the first place (!isActive), then we shouldn't do this. + */ +if (bootstrapData.user?.isActive) { + document.addEventListener('visibilitychange', () => { + // we only care about the tab becoming visible, not vice versa + if (document.visibilityState !== 'visible') return; + + getMe().catch(() => { + // ignore error, SupersetClient will redirect to login on a 401 + }); + }); +} diff --git a/superset-frontend/src/profile/components/fixtures.tsx b/superset-frontend/src/profile/components/fixtures.tsx index 90e97da22c762..e721b6dfa7510 100644 --- a/superset-frontend/src/profile/components/fixtures.tsx +++ b/superset-frontend/src/profile/components/fixtures.tsx @@ -40,6 +40,7 @@ export const user: UserWithPermissionsAndRoles = { userId: 5, email: 'alpha@alpha.com', isActive: true, + isAnonymous: false, permissions: { datasource_access: ['table1', 'table2'], database_access: ['db1', 'db2', 'db3'], diff --git a/superset-frontend/src/types/bootstrapTypes.ts b/superset-frontend/src/types/bootstrapTypes.ts index 15e7eba0a0313..dc41eb5878f81 100644 --- a/superset-frontend/src/types/bootstrapTypes.ts +++ b/superset-frontend/src/types/bootstrapTypes.ts @@ -23,6 +23,7 @@ export type User = { email: string; firstName: string; isActive: boolean; + isAnonymous: boolean; lastName: string; userId: number; username: string; diff --git a/superset/initialization/__init__.py b/superset/initialization/__init__.py index 25eab15aaff52..8c0e0a3b44478 100644 --- a/superset/initialization/__init__.py +++ b/superset/initialization/__init__.py @@ -50,6 +50,7 @@ ) from superset.security import SupersetSecurityManager from superset.typing import FlaskResponse +from superset.users.api import CurrentUserRestApi from superset.utils.core import pessimistic_connection_handling from superset.utils.log import DBEventLogger, get_event_logger_from_cfg_value @@ -205,6 +206,7 @@ def init_views(self) -> 
None: appbuilder.add_api(ChartRestApi) appbuilder.add_api(ChartDataRestApi) appbuilder.add_api(CssTemplateRestApi) + appbuilder.add_api(CurrentUserRestApi) appbuilder.add_api(DashboardFilterStateRestApi) appbuilder.add_api(DashboardRestApi) appbuilder.add_api(DatabaseRestApi) diff --git a/superset/users/api.py b/superset/users/api.py new file mode 100644 index 0000000000000..7d52056a8f070 --- /dev/null +++ b/superset/users/api.py @@ -0,0 +1,56 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +from flask import g, Response +from flask_appbuilder.api import BaseApi, expose, safe + +from .schemas import UserResponseSchema + +user_response_schema = UserResponseSchema() + + +class CurrentUserRestApi(BaseApi): + """ An api to get information about the current user """ + + resource_name = "me" + openapi_spec_tag = "Current User" + openapi_spec_component_schemas = (UserResponseSchema,) + + @expose("/", methods=["GET"]) + @safe + def me(self) -> Response: + """Get the user object corresponding to the agent making the request + --- + get: + description: >- + Returns the user object corresponding to the agent making the request, + or returns a 401 error if the user is unauthenticated. 
+ responses: + 200: + description: The current user + content: + application/json: + schema: + type: object + properties: + result: + $ref: '#/components/schemas/UserResponseSchema' + 401: + $ref: '#/components/responses/401' + """ + if g.user is None or g.user.is_anonymous: + return self.response_401() + return self.response(200, result=user_response_schema.dump(g.user)) diff --git a/superset/users/schemas.py b/superset/users/schemas.py new file mode 100644 index 0000000000000..021209edafb8e --- /dev/null +++ b/superset/users/schemas.py @@ -0,0 +1,28 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+from marshmallow import Schema +from marshmallow.fields import Boolean, Integer, String + + +class UserResponseSchema(Schema): + id = Integer() + username = String() + email = String() + first_name = String() + last_name = String() + is_active = Boolean() + is_anonymous = Boolean() diff --git a/superset/views/utils.py b/superset/views/utils.py index eda24863e1059..c318c38cbf17d 100644 --- a/superset/views/utils.py +++ b/superset/views/utils.py @@ -81,6 +81,7 @@ def bootstrap_user_data(user: User, include_perms: bool = False) -> Dict[str, An "lastName": user.last_name, "userId": user.id, "isActive": user.is_active, + "isAnonymous": user.is_anonymous, "createdOn": user.created_on.isoformat(), "email": user.email, } diff --git a/tests/integration_tests/security_tests.py b/tests/integration_tests/security_tests.py index baa5fe35d7320..f1534692bed86 100644 --- a/tests/integration_tests/security_tests.py +++ b/tests/integration_tests/security_tests.py @@ -906,6 +906,7 @@ def test_views_are_secured(self): ["LocaleView", "index"], ["AuthDBView", "login"], ["AuthDBView", "logout"], + ["CurrentUserRestApi", "me"], ["Dashboard", "embedded"], ["R", "index"], ["Superset", "log"], diff --git a/tests/integration_tests/users/api_tests.py b/tests/integration_tests/users/api_tests.py new file mode 100644 index 0000000000000..ee965f6f2bf01 --- /dev/null +++ b/tests/integration_tests/users/api_tests.py @@ -0,0 +1,49 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +# type: ignore +"""Unit tests for Superset""" +import json +from unittest.mock import patch + +from superset import security_manager +from tests.integration_tests.base_tests import SupersetTestCase + +meUri = "/api/v1/me/" + + +class TestCurrentUserApi(SupersetTestCase): + def test_get_me_logged_in(self): + self.login(username="admin") + + rv = self.client.get(meUri) + + self.assertEqual(200, rv.status_code) + response = json.loads(rv.data.decode("utf-8")) + self.assertEqual("admin", response["result"]["username"]) + self.assertEqual(True, response["result"]["is_active"]) + self.assertEqual(False, response["result"]["is_anonymous"]) + + def test_get_me_unauthorized(self): + self.logout() + rv = self.client.get(meUri) + self.assertEqual(401, rv.status_code) + + @patch("superset.security.manager.g") + def test_get_me_anonymous(self, mock_g): + mock_g.user = security_manager.get_anonymous_user + rv = self.client.get(meUri) + self.assertEqual(401, rv.status_code) From 0af671fa255027fa4bbea623d02cef9a8839d77a Mon Sep 17 00:00:00 2001 From: Jinghuayao <81597121+jinghua-qa@users.noreply.github.com> Date: Thu, 24 Feb 2022 10:24:14 -0800 Subject: [PATCH 03/37] test(native filter): add new native filter test for time column filter (#18850) * add new native filter test for time column filter --- .../dashboard/nativeFilters.test.ts | 41 +++++++++++++++++++ 1 file changed, 41 insertions(+) diff --git a/superset-frontend/cypress-base/cypress/integration/dashboard/nativeFilters.test.ts b/superset-frontend/cypress-base/cypress/integration/dashboard/nativeFilters.test.ts 
index 74785d05df5bf..414f14df5c598 100644 --- a/superset-frontend/cypress-base/cypress/integration/dashboard/nativeFilters.test.ts +++ b/superset-frontend/cypress-base/cypress/integration/dashboard/nativeFilters.test.ts @@ -501,6 +501,47 @@ describe('Nativefilters Sanity test', () => { .contains('Month') .should('be.visible'); }); + it('User can create a time column filter', () => { + cy.get(nativeFilters.filterFromDashboardView.expand).click({ force: true }); + cy.get(nativeFilters.filterFromDashboardView.createFilterButton) + .should('be.visible') + .click(); + cy.get(nativeFilters.filtersPanel.filterTypeInput) + .find(nativeFilters.filtersPanel.filterTypeItem) + .click({ force: true }); + cy.get('[label="Time column"]').click({ force: true }); + cy.get(nativeFilters.modal.container) + .find(nativeFilters.filtersPanel.filterName) + .click({ force: true }) + .clear() + .type('time column'); + cy.get(nativeFilters.modal.container) + .find(nativeFilters.filtersPanel.datasetName) + .click() + .type('wb_health_population'); + cy.get(nativeFilters.silentLoading).should('not.exist'); + cy.get('[label="wb_health_population"]').click(); + + cy.get(nativeFilters.modal.footer) + .contains('Save') + .should('be.visible') + .click(); + cy.intercept(`/api/v1/chart/data?form_data=**`).as('chart'); + cy.get(nativeFilters.modal.container).should('not.exist'); + // assert that native filter is created + cy.get(nativeFilters.filterFromDashboardView.filterName) + .should('be.visible') + .contains('time column'); + cy.get(nativeFilters.filterFromDashboardView.filterValueInput) + .should('be.visible', { timeout: 10000 }) + .click() + .type('year{enter}'); + cy.get(nativeFilters.applyFilter).click({ force: true }); + cy.wait('@chart'); + cy.get(nativeFilters.filterFromDashboardView.filterContent) + .contains('year') + .should('be.visible'); + }); }); xdescribe('Nativefilters', () => { From c1ee75dd8efc27dac85805720107703aa5916a9c Mon Sep 17 00:00:00 2001 From: Smart-Codi Date: Thu, 24 
Feb 2022 18:34:21 +0000 Subject: [PATCH 04/37] fix: filter indicator pointing issue on popover (#18853) --- .../dashboard/components/FiltersBadge/DetailsPanel/index.tsx | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/superset-frontend/src/dashboard/components/FiltersBadge/DetailsPanel/index.tsx b/superset-frontend/src/dashboard/components/FiltersBadge/DetailsPanel/index.tsx index 54058236c1d2d..b5dfb0150d59e 100644 --- a/superset-frontend/src/dashboard/components/FiltersBadge/DetailsPanel/index.tsx +++ b/superset-frontend/src/dashboard/components/FiltersBadge/DetailsPanel/index.tsx @@ -279,7 +279,7 @@ const DetailsPanelPopover = ({ content={content} visible={visible} onVisibleChange={handlePopoverStatus} - placement="bottom" + placement="bottomRight" trigger="click" > {children} From 9fd18e98aca81a67d9bebf9cb488de1f802b670d Mon Sep 17 00:00:00 2001 From: michael-hoffman-26 <62829859+michael-hoffman-26@users.noreply.github.com> Date: Thu, 24 Feb 2022 20:50:55 +0200 Subject: [PATCH 05/37] refactor(ImportModelsCommand): clean code, extract logic into methods (#18866) Co-authored-by: michael_hoffman --- superset/commands/importers/v1/__init__.py | 42 +++++++++++++--------- 1 file changed, 25 insertions(+), 17 deletions(-) diff --git a/superset/commands/importers/v1/__init__.py b/superset/commands/importers/v1/__init__.py index 5440470a16331..89891f48e597a 100644 --- a/superset/commands/importers/v1/__init__.py +++ b/superset/commands/importers/v1/__init__.py @@ -33,7 +33,6 @@ class ImportModelsCommand(BaseCommand): - """Import models""" dao = BaseDAO @@ -73,14 +72,6 @@ def run(self) -> None: def validate(self) -> None: exceptions: List[ValidationError] = [] - # load existing databases so we can apply the password validation - db_passwords = { - str(uuid): password - for uuid, password in db.session.query( - Database.uuid, Database.password - ).all() - } - # verify that the metadata file is present and valid try: metadata: Optional[Dict[str, str]] = 
load_metadata(self.contents) @@ -88,7 +79,19 @@ def validate(self) -> None: exceptions.append(exc) metadata = None - # validate that the type declared in METADATA_FILE_NAME is correct + self._validate_metadata_type(metadata, exceptions) + self._load__configs(exceptions) + self._prevent_overwrite_existing_model(exceptions) + + if exceptions: + exception = CommandInvalidError(f"Error importing {self.model_name}") + exception.add_list(exceptions) + raise exception + + def _validate_metadata_type( + self, metadata: Optional[Dict[str, str]], exceptions: List[ValidationError] + ) -> None: + """Validate that the type declared in METADATA_FILE_NAME is correct""" if metadata and "type" in metadata: type_validator = validate.Equal(self.dao.model_cls.__name__) # type: ignore try: @@ -97,7 +100,14 @@ def validate(self) -> None: exc.messages = {METADATA_FILE_NAME: {"type": exc.messages}} exceptions.append(exc) - # validate objects + def _load__configs(self, exceptions: List[ValidationError]) -> None: + # load existing databases so we can apply the password validation + db_passwords: Dict[str, str] = { + str(uuid): password + for uuid, password in db.session.query( + Database.uuid, Database.password + ).all() + } for file_name, content in self.contents.items(): # skip directories if not content: @@ -121,7 +131,10 @@ def validate(self) -> None: exc.messages = {file_name: exc.messages} exceptions.append(exc) - # check if the object exists and shouldn't be overwritten + def _prevent_overwrite_existing_model( # pylint: disable=invalid-name + self, exceptions: List[ValidationError] + ) -> None: + """check if the object exists and shouldn't be overwritten""" if not self.overwrite: existing_uuids = self._get_uuids() for file_name, config in self._configs.items(): @@ -139,8 +152,3 @@ def validate(self) -> None: } ) ) - - if exceptions: - exception = CommandInvalidError(f"Error importing {self.model_name}") - exception.add_list(exceptions) - raise exception From 
00c99c91ec302a13978c5fd3a1099d6f4648a20e Mon Sep 17 00:00:00 2001 From: Beto Dealmeida Date: Thu, 24 Feb 2022 11:02:01 -0800 Subject: [PATCH 06/37] feat: new dataset/table/column models (#17543) * feat: new models for SIP-68 * feat: new dataset models DB migration * Add shadow write (WIP) * Physical dataset shadow write (WIP) * Virtual dataset shadow write (WIP) * Update migration to populate models * Cascade delete columns * after_delete * Update hook * Add metric test * Do not rename tables * Small fixes * Fix migration * Fix tests * Fix more tests * Even more tests * Addressing comments (WIP) * Fix migration * Rebase and update head * Fix unit tests * Add external management cols * Small fixes --- superset/columns/__init__.py | 16 + superset/columns/models.py | 99 ++ superset/columns/schemas.py | 40 + superset/connectors/sqla/models.py | 487 ++++++- superset/datasets/models.py | 92 ++ superset/datasets/schemas.py | 14 + .../b8d3a24d9131_new_dataset_models.py | 598 ++++++++ superset/models/helpers.py | 20 + superset/security/manager.py | 2 + superset/tables/__init__.py | 16 + superset/tables/models.py | 92 ++ superset/tables/schemas.py | 40 + .../dashboards/filter_sets/conftest.py | 195 ++- .../filter_sets/create_api_tests.py | 4 +- .../filter_sets/delete_api_tests.py | 8 +- .../dashboards/filter_sets/get_api_tests.py | 4 +- .../filter_sets/update_api_tests.py | 8 +- .../dashboards/superset_factory_util.py | 34 +- tests/integration_tests/datasets/api_tests.py | 1 + tests/integration_tests/security_tests.py | 19 +- tests/unit_tests/columns/__init__.py | 16 + tests/unit_tests/columns/test_models.py | 53 + .../commands/importers/v1/import_test.py | 14 +- tests/unit_tests/datasets/test_models.py | 1244 +++++++++++++++++ tests/unit_tests/tables/__init__.py | 16 + tests/unit_tests/tables/test_models.py | 50 + 26 files changed, 3013 insertions(+), 169 deletions(-) create mode 100644 superset/columns/__init__.py create mode 100644 superset/columns/models.py create mode 
100644 superset/columns/schemas.py create mode 100644 superset/datasets/models.py create mode 100644 superset/migrations/versions/b8d3a24d9131_new_dataset_models.py create mode 100644 superset/tables/__init__.py create mode 100644 superset/tables/models.py create mode 100644 superset/tables/schemas.py create mode 100644 tests/unit_tests/columns/__init__.py create mode 100644 tests/unit_tests/columns/test_models.py create mode 100644 tests/unit_tests/datasets/test_models.py create mode 100644 tests/unit_tests/tables/__init__.py create mode 100644 tests/unit_tests/tables/test_models.py diff --git a/superset/columns/__init__.py b/superset/columns/__init__.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/superset/columns/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. diff --git a/superset/columns/models.py b/superset/columns/models.py new file mode 100644 index 0000000000000..039f73ff57579 --- /dev/null +++ b/superset/columns/models.py @@ -0,0 +1,99 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. 
The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +""" +Column model. + +This model was introduced in SIP-68 (https://github.com/apache/superset/issues/14909), +and represents a "column" in a table or dataset. In addition to a column, new models for +tables, metrics, and datasets were also introduced. + +These models are not fully implemented, and shouldn't be used yet. +""" + +import sqlalchemy as sa +from flask_appbuilder import Model + +from superset.models.helpers import ( + AuditMixinNullable, + ExtraJSONMixin, + ImportExportMixin, +) + + +class Column( + Model, AuditMixinNullable, ExtraJSONMixin, ImportExportMixin, +): + """ + A "column". + + The definition of column here is overloaded: it can represent a physical column in a + database relation (table or view); a computed/derived column on a dataset; or an + aggregation expression representing a metric. + """ + + __tablename__ = "sl_columns" + + id = sa.Column(sa.Integer, primary_key=True) + + # We use ``sa.Text`` for these attributes because (1) in modern databases the + # performance is the same as ``VARCHAR``[1] and (2) because some table names can be + # **really** long (eg, Google Sheets URLs). + # + # [1] https://www.postgresql.org/docs/9.1/datatype-character.html + name = sa.Column(sa.Text) + type = sa.Column(sa.Text) + + # Columns are defined by expressions. For tables, these are the actual columns names, + # and should match the ``name`` attribute. 
For datasets, these can be any valid SQL + # expression. If the SQL expression is an aggregation the column is a metric, + # otherwise it's a computed column. + expression = sa.Column(sa.Text) + + # Does the expression point directly to a physical column? + is_physical = sa.Column(sa.Boolean, default=True) + + # Additional metadata describing the column. + description = sa.Column(sa.Text) + warning_text = sa.Column(sa.Text) + unit = sa.Column(sa.Text) + + # Is this a time column? Useful for plotting time series. + is_temporal = sa.Column(sa.Boolean, default=False) + + # Is this a spatial column? This could be leveraged in the future for spatial + # visualizations. + is_spatial = sa.Column(sa.Boolean, default=False) + + # Is this column a partition? Useful for scheduling queries and previewing the latest + # data. + is_partition = sa.Column(sa.Boolean, default=False) + + # Is this column an aggregation (metric)? + is_aggregation = sa.Column(sa.Boolean, default=False) + + # Assuming the column is an aggregation, is it additive? Useful for determining which + # aggregations can be done on the metric. Eg, ``COUNT(DISTINCT user_id)`` is not + # additive, so it shouldn't be used in a ``SUM``. + is_additive = sa.Column(sa.Boolean, default=False) + + # Is an increase desired? Useful for displaying the results of A/B tests, or setting + # up alerts. Eg, this is true for "revenue", but false for "latency". + is_increase_desired = sa.Column(sa.Boolean, default=True) + + # Column is managed externally and should be read-only inside Superset + is_managed_externally = sa.Column(sa.Boolean, nullable=False, default=False) + external_url = sa.Column(sa.Text, nullable=True) diff --git a/superset/columns/schemas.py b/superset/columns/schemas.py new file mode 100644 index 0000000000000..5368bfbcca7ec --- /dev/null +++ b/superset/columns/schemas.py @@ -0,0 +1,40 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. 
See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +""" +Schema for the column model. + +This model was introduced in SIP-68 (https://github.com/apache/superset/issues/14909), +and represents a "column" in a table or dataset. In addition to a column, new models for +tables, metrics, and datasets were also introduced. + +These models are not fully implemented, and shouldn't be used yet. +""" + +from marshmallow_sqlalchemy import SQLAlchemyAutoSchema + +from superset.columns.models import Column + + +class ColumnSchema(SQLAlchemyAutoSchema): + """ + Schema for the ``Column`` model. + """ + + class Meta: # pylint: disable=too-few-public-methods + model = Column + load_instance = True + include_relationships = True diff --git a/superset/connectors/sqla/models.py b/superset/connectors/sqla/models.py index ca1d4bc57a022..81a53e74dae3b 100644 --- a/superset/connectors/sqla/models.py +++ b/superset/connectors/sqla/models.py @@ -14,7 +14,7 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. 
-# pylint: disable=too-many-lines +# pylint: disable=too-many-lines, redefined-outer-name import dataclasses import json import logging @@ -53,6 +53,7 @@ desc, Enum, ForeignKey, + inspect, Integer, or_, select, @@ -71,12 +72,14 @@ from sqlalchemy.sql.selectable import Alias, TableClause from superset import app, db, is_feature_enabled, security_manager +from superset.columns.models import Column as NewColumn from superset.common.db_query_status import QueryStatus from superset.connectors.base.models import BaseColumn, BaseDatasource, BaseMetric from superset.connectors.sqla.utils import ( get_physical_table_metadata, get_virtual_table_metadata, ) +from superset.datasets.models import Dataset as NewDataset from superset.db_engine_specs.base import BaseEngineSpec, CTE_ALIAS, TimestampExpression from superset.exceptions import QueryObjectValidationError from superset.jinja_context import ( @@ -86,8 +89,14 @@ ) from superset.models.annotations import Annotation from superset.models.core import Database -from superset.models.helpers import AuditMixinNullable, CertificationMixin, QueryResult +from superset.models.helpers import ( + AuditMixinNullable, + CertificationMixin, + clone_model, + QueryResult, +) from superset.sql_parse import ParsedQuery +from superset.tables.models import Table as NewTable from superset.typing import AdhocColumn, AdhocMetric, Metric, OrderBy, QueryObjectDict from superset.utils import core as utils from superset.utils.core import ( @@ -104,6 +113,13 @@ VIRTUAL_TABLE_ALIAS = "virtual_table" +# a non-exhaustive set of additive metrics +ADDITIVE_METRIC_TYPES = { + "count", + "sum", + "doubleSum", +} + class SqlaQuery(NamedTuple): applied_template_filters: List[str] @@ -1830,23 +1846,474 @@ def before_update( raise Exception(get_dataset_exist_error_msg(target.full_name)) @staticmethod - def update_table( - _mapper: Mapper, _connection: Connection, obj: Union[SqlMetric, TableColumn] + def update_table( # pylint: disable=unused-argument + mapper: 
Mapper, connection: Connection, target: Union[SqlMetric, TableColumn] ) -> None: """ Forces an update to the table's changed_on value when a metric or column on the table is updated. This busts the cache key for all charts that use the table. - :param _mapper: Unused. - :param _connection: Unused. - :param obj: The metric or column that was updated. + :param mapper: Unused. + :param connection: Unused. + :param target: The metric or column that was updated. + """ + inspector = inspect(target) + session = inspector.session + + # get DB-specific conditional quoter for expressions that point to columns or + # table names + database = ( + target.table.database + or session.query(Database).filter_by(id=target.database_id).one() + ) + engine = database.get_sqla_engine(schema=target.table.schema) + conditional_quote = engine.dialect.identifier_preparer.quote + + session.execute(update(SqlaTable).where(SqlaTable.id == target.table.id)) + + # update ``Column`` model as well + dataset = ( + session.query(NewDataset).filter_by(sqlatable_id=target.table.id).one() + ) + + if isinstance(target, TableColumn): + columns = [ + column + for column in dataset.columns + if column.name == target.column_name + ] + if not columns: + return + + column = columns[0] + extra_json = json.loads(target.extra or "{}") + for attr in {"groupby", "filterable", "verbose_name", "python_date_format"}: + value = getattr(target, attr) + if value: + extra_json[attr] = value + + column.name = target.column_name + column.type = target.type or "Unknown" + column.expression = target.expression or conditional_quote( + target.column_name + ) + column.description = target.description + column.is_temporal = target.is_dttm + column.is_physical = target.expression is None + column.extra_json = json.dumps(extra_json) if extra_json else None + + else: # SqlMetric + columns = [ + column + for column in dataset.columns + if column.name == target.metric_name + ] + if not columns: + return + + column = columns[0] + 
extra_json = json.loads(target.extra or "{}") + for attr in {"verbose_name", "metric_type", "d3format"}: + value = getattr(target, attr) + if value: + extra_json[attr] = value + + is_additive = ( + target.metric_type + and target.metric_type.lower() in ADDITIVE_METRIC_TYPES + ) + + column.name = target.metric_name + column.expression = target.expression + column.warning_text = target.warning_text + column.description = target.description + column.is_additive = is_additive + column.extra_json = json.dumps(extra_json) if extra_json else None + + @staticmethod + def after_insert( # pylint: disable=too-many-locals + mapper: Mapper, connection: Connection, target: "SqlaTable", + ) -> None: + """ + Shadow write the dataset to new models. + + The ``SqlaTable`` model is currently being migrated to two new models, ``Table`` + and ``Dataset``. In the first phase of the migration the new models are populated + whenever ``SqlaTable`` is modified (created, updated, or deleted). + + In the second phase of the migration reads will be done from the new models. + Finally, in the third phase of the migration the old models will be removed. + + For more context: https://github.com/apache/superset/issues/14909 + """ + # set permissions + security_manager.set_perm(mapper, connection, target) + + session = inspect(target).session + + # get DB-specific conditional quoter for expressions that point to columns or + # table names + database = ( + target.database + or session.query(Database).filter_by(id=target.database_id).one() + ) + engine = database.get_sqla_engine(schema=target.schema) + conditional_quote = engine.dialect.identifier_preparer.quote + + # create columns + columns = [] + for column in target.columns: + # ``is_active`` might be ``None`` at this point, but it defaults to ``True``. 
+ if column.is_active is False: + continue + + extra_json = json.loads(column.extra or "{}") + for attr in {"groupby", "filterable", "verbose_name", "python_date_format"}: + value = getattr(column, attr) + if value: + extra_json[attr] = value + + columns.append( + NewColumn( + name=column.column_name, + type=column.type or "Unknown", + expression=column.expression + or conditional_quote(column.column_name), + description=column.description, + is_temporal=column.is_dttm, + is_aggregation=False, + is_physical=column.expression is None, + is_spatial=False, + is_partition=False, + is_increase_desired=True, + extra_json=json.dumps(extra_json) if extra_json else None, + is_managed_externally=target.is_managed_externally, + external_url=target.external_url, + ), + ) + + # create metrics + for metric in target.metrics: + extra_json = json.loads(metric.extra or "{}") + for attr in {"verbose_name", "metric_type", "d3format"}: + value = getattr(metric, attr) + if value: + extra_json[attr] = value + + is_additive = ( + metric.metric_type + and metric.metric_type.lower() in ADDITIVE_METRIC_TYPES + ) + + columns.append( + NewColumn( + name=metric.metric_name, + type="Unknown", # figuring this out would require a type inferrer + expression=metric.expression, + warning_text=metric.warning_text, + description=metric.description, + is_aggregation=True, + is_additive=is_additive, + is_physical=False, + is_spatial=False, + is_partition=False, + is_increase_desired=True, + extra_json=json.dumps(extra_json) if extra_json else None, + is_managed_externally=target.is_managed_externally, + external_url=target.external_url, + ), + ) + + # physical dataset + tables = [] + if target.sql is None: + physical_columns = [column for column in columns if column.is_physical] + + # create table + table = NewTable( + name=target.table_name, + schema=target.schema, + catalog=None, # currently not supported + database_id=target.database_id, + columns=physical_columns, + 
is_managed_externally=target.is_managed_externally, + external_url=target.external_url, + ) + tables.append(table) + + # virtual dataset + else: + # mark all columns as virtual (not physical) + for column in columns: + column.is_physical = False + + # find referenced tables + parsed = ParsedQuery(target.sql) + referenced_tables = parsed.tables + + # predicate for finding the referenced tables + predicate = or_( + *[ + and_( + NewTable.schema == (table.schema or target.schema), + NewTable.name == table.table, + ) + for table in referenced_tables + ] + ) + tables = session.query(NewTable).filter(predicate).all() + + # create the new dataset + dataset = NewDataset( + sqlatable_id=target.id, + name=target.table_name, + expression=target.sql or conditional_quote(target.table_name), + tables=tables, + columns=columns, + is_physical=target.sql is None, + is_managed_externally=target.is_managed_externally, + external_url=target.external_url, + ) + session.add(dataset) + + @staticmethod + def after_delete( # pylint: disable=unused-argument + mapper: Mapper, connection: Connection, target: "SqlaTable", + ) -> None: + """ + Shadow write the dataset to new models. + + The ``SqlaTable`` model is currently being migrated to two new models, ``Table`` + and ``Dataset``. In the first phase of the migration the new models are populated + whenever ``SqlaTable`` is modified (created, updated, or deleted). + + In the second phase of the migration reads will be done from the new models. + Finally, in the third phase of the migration the old models will be removed. 
+ + For more context: https://github.com/apache/superset/issues/14909 + """ + session = inspect(target).session + dataset = ( + session.query(NewDataset).filter_by(sqlatable_id=target.id).one_or_none() + ) + if dataset: + session.delete(dataset) + + @staticmethod + def after_update( # pylint: disable=too-many-branches, too-many-locals, too-many-statements + mapper: Mapper, connection: Connection, target: "SqlaTable", + ) -> None: + """ + Shadow write the dataset to new models. + + The ``SqlaTable`` model is currently being migrated to two new models, ``Table`` + and ``Dataset``. In the first phase of the migration the new models are populated + whenever ``SqlaTable`` is modified (created, updated, or deleted). + + In the second phase of the migration reads will be done from the new models. + Finally, in the third phase of the migration the old models will be removed. + + For more context: https://github.com/apache/superset/issues/14909 """ - db.session.execute(update(SqlaTable).where(SqlaTable.id == obj.table.id)) + inspector = inspect(target) + session = inspector.session + + # double-check that ``UPDATE``s are actually pending (this method is called even + # for instances that have no net changes to their column-based attributes) + if not session.is_modified(target, include_collections=True): + return + + # set permissions + security_manager.set_perm(mapper, connection, target) + + dataset = ( + session.query(NewDataset).filter_by(sqlatable_id=target.id).one_or_none() + ) + if not dataset: + return + + # get DB-specific conditional quoter for expressions that point to columns or + # table names + database = ( + target.database + or session.query(Database).filter_by(id=target.database_id).one() + ) + engine = database.get_sqla_engine(schema=target.schema) + conditional_quote = engine.dialect.identifier_preparer.quote + + # update columns + if inspector.attrs.columns.history.has_changes(): + # handle deleted columns + if inspector.attrs.columns.history.deleted: + 
column_names = { + column.column_name + for column in inspector.attrs.columns.history.deleted + } + dataset.columns = [ + column + for column in dataset.columns + if column.name not in column_names + ] + + # handle inserted columns + for column in inspector.attrs.columns.history.added: + # ``is_active`` might be ``None``, but it defaults to ``True``. + if column.is_active is False: + continue + + extra_json = json.loads(column.extra or "{}") + for attr in { + "groupby", + "filterable", + "verbose_name", + "python_date_format", + }: + value = getattr(column, attr) + if value: + extra_json[attr] = value + + dataset.columns.append( + NewColumn( + name=column.column_name, + type=column.type or "Unknown", + expression=column.expression + or conditional_quote(column.column_name), + description=column.description, + is_temporal=column.is_dttm, + is_aggregation=False, + is_physical=column.expression is None, + is_spatial=False, + is_partition=False, + is_increase_desired=True, + extra_json=json.dumps(extra_json) if extra_json else None, + is_managed_externally=target.is_managed_externally, + external_url=target.external_url, + ) + ) + + # update metrics + if inspector.attrs.metrics.history.has_changes(): + # handle deleted metrics + if inspector.attrs.metrics.history.deleted: + column_names = { + metric.metric_name + for metric in inspector.attrs.metrics.history.deleted + } + dataset.columns = [ + column + for column in dataset.columns + if column.name not in column_names + ] + + # handle inserted metrics + for metric in inspector.attrs.metrics.history.added: + extra_json = json.loads(metric.extra or "{}") + for attr in {"verbose_name", "metric_type", "d3format"}: + value = getattr(metric, attr) + if value: + extra_json[attr] = value + + is_additive = ( + metric.metric_type + and metric.metric_type.lower() in ADDITIVE_METRIC_TYPES + ) + + dataset.columns.append( + NewColumn( + name=metric.metric_name, + type="Unknown", + expression=metric.expression, + 
warning_text=metric.warning_text, + description=metric.description, + is_aggregation=True, + is_additive=is_additive, + is_physical=False, + is_spatial=False, + is_partition=False, + is_increase_desired=True, + extra_json=json.dumps(extra_json) if extra_json else None, + is_managed_externally=target.is_managed_externally, + external_url=target.external_url, + ) + ) + + # physical dataset + if target.sql is None: + physical_columns = [ + column for column in dataset.columns if column.is_physical + ] + + # if the table name changed we should create a new table instance, instead + # of reusing the original one + if ( + inspector.attrs.table_name.history.has_changes() + or inspector.attrs.schema.history.has_changes() + or inspector.attrs.database_id.history.has_changes() + ): + # does the dataset point to an existing table? + table = ( + session.query(NewTable) + .filter_by( + database_id=target.database_id, + schema=target.schema, + name=target.table_name, + ) + .first() + ) + if not table: + # create new columns + physical_columns = [ + clone_model(column, ignore=["uuid"]) + for column in physical_columns + ] + + # create new table + table = NewTable( + name=target.table_name, + schema=target.schema, + catalog=None, + database_id=target.database_id, + columns=physical_columns, + is_managed_externally=target.is_managed_externally, + external_url=target.external_url, + ) + dataset.tables = [table] + elif dataset.tables: + table = dataset.tables[0] + table.columns = physical_columns + + # virtual dataset + else: + # mark all columns as virtual (not physical) + for column in dataset.columns: + column.is_physical = False + + # update referenced tables if SQL changed + if inspector.attrs.sql.history.has_changes(): + parsed = ParsedQuery(target.sql) + referenced_tables = parsed.tables + + predicate = or_( + *[ + and_( + NewTable.schema == (table.schema or target.schema), + NewTable.name == table.table, + ) + for table in referenced_tables + ] + ) + dataset.tables = 
session.query(NewTable).filter(predicate).all() + + # update other attributes + dataset.name = target.table_name + dataset.expression = target.sql or conditional_quote(target.table_name) + dataset.is_physical = target.sql is None -sa.event.listen(SqlaTable, "after_insert", security_manager.set_perm) -sa.event.listen(SqlaTable, "after_update", security_manager.set_perm) sa.event.listen(SqlaTable, "before_update", SqlaTable.before_update) +sa.event.listen(SqlaTable, "after_insert", SqlaTable.after_insert) +sa.event.listen(SqlaTable, "after_delete", SqlaTable.after_delete) +sa.event.listen(SqlaTable, "after_update", SqlaTable.after_update) sa.event.listen(SqlMetric, "after_update", SqlaTable.update_table) sa.event.listen(TableColumn, "after_update", SqlaTable.update_table) diff --git a/superset/datasets/models.py b/superset/datasets/models.py new file mode 100644 index 0000000000000..56a6fbf4000e3 --- /dev/null +++ b/superset/datasets/models.py @@ -0,0 +1,92 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +""" +Dataset model. + +This model was introduced in SIP-68 (https://github.com/apache/superset/issues/14909), +and represents a "dataset" -- either a physical table or a virtual. 
In addition to a +dataset, new models for columns, metrics, and tables were also introduced. + +These models are not fully implemented, and shouldn't be used yet. +""" + +from typing import List + +import sqlalchemy as sa +from flask_appbuilder import Model +from sqlalchemy.orm import relationship + +from superset.columns.models import Column +from superset.models.helpers import ( + AuditMixinNullable, + ExtraJSONMixin, + ImportExportMixin, +) +from superset.tables.models import Table + +column_association_table = sa.Table( + "sl_dataset_columns", + Model.metadata, # pylint: disable=no-member + sa.Column("dataset_id", sa.ForeignKey("sl_datasets.id")), + sa.Column("column_id", sa.ForeignKey("sl_columns.id")), +) + +table_association_table = sa.Table( + "sl_dataset_tables", + Model.metadata, # pylint: disable=no-member + sa.Column("dataset_id", sa.ForeignKey("sl_datasets.id")), + sa.Column("table_id", sa.ForeignKey("sl_tables.id")), +) + + +class Dataset(Model, AuditMixinNullable, ExtraJSONMixin, ImportExportMixin): + """ + A table/view in a database. + """ + + __tablename__ = "sl_datasets" + + id = sa.Column(sa.Integer, primary_key=True) + + # A temporary column, used for shadow writing to the new model. Once the ``SqlaTable`` + # model has been deleted this column can be removed. + sqlatable_id = sa.Column(sa.Integer, nullable=True, unique=True) + + # We use ``sa.Text`` for these attributes because (1) in modern databases the + # performance is the same as ``VARCHAR``[1] and (2) because some table names can be + # **really** long (eg, Google Sheets URLs). + # + # [1] https://www.postgresql.org/docs/9.1/datatype-character.html + name = sa.Column(sa.Text) + + expression = sa.Column(sa.Text) + + # n:n relationship + tables: List[Table] = relationship("Table", secondary=table_association_table) + + # The relationship between datasets and columns is 1:n, but we use a many-to-many + # association to differentiate between the relationship between tables and columns. 
+ columns: List[Column] = relationship( + "Column", secondary=column_association_table, cascade="all, delete" + ) + + # Does the dataset point directly to a ``Table``? + is_physical = sa.Column(sa.Boolean, default=False) + + # Column is managed externally and should be read-only inside Superset + is_managed_externally = sa.Column(sa.Boolean, nullable=False, default=False) + external_url = sa.Column(sa.Text, nullable=True) diff --git a/superset/datasets/schemas.py b/superset/datasets/schemas.py index 06b13a0e121ef..775798d274fa7 100644 --- a/superset/datasets/schemas.py +++ b/superset/datasets/schemas.py @@ -21,6 +21,9 @@ from flask_babel import lazy_gettext as _ from marshmallow import fields, pre_load, Schema, ValidationError from marshmallow.validate import Length +from marshmallow_sqlalchemy import SQLAlchemyAutoSchema + +from superset.datasets.models import Dataset get_delete_ids_schema = {"type": "array", "items": {"type": "integer"}} get_export_ids_schema = {"type": "array", "items": {"type": "integer"}} @@ -209,3 +212,14 @@ def fix_extra(self, data: Dict[str, Any], **kwargs: Any) -> Dict[str, Any]: version = fields.String(required=True) database_uuid = fields.UUID(required=True) data = fields.URL() + + +class DatasetSchema(SQLAlchemyAutoSchema): + """ + Schema for the ``Dataset`` model. + """ + + class Meta: # pylint: disable=too-few-public-methods + model = Dataset + load_instance = True + include_relationships = True diff --git a/superset/migrations/versions/b8d3a24d9131_new_dataset_models.py b/superset/migrations/versions/b8d3a24d9131_new_dataset_models.py new file mode 100644 index 0000000000000..e6d4537272ff3 --- /dev/null +++ b/superset/migrations/versions/b8d3a24d9131_new_dataset_models.py @@ -0,0 +1,598 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. 
The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +# pylint: disable=too-few-public-methods +"""New dataset models + +Revision ID: b8d3a24d9131 +Revises: 5afbb1a5849b +Create Date: 2021-11-11 16:41:53.266965 + +""" + +import json +from typing import Any, Dict, List, Optional, Type +from uuid import uuid4 + +import sqlalchemy as sa +from alembic import op +from sqlalchemy import and_, inspect, or_ +from sqlalchemy.engine import create_engine, Engine +from sqlalchemy.engine.url import make_url, URL +from sqlalchemy.exc import ArgumentError +from sqlalchemy.ext.declarative import declarative_base +from sqlalchemy.orm import backref, relationship, Session +from sqlalchemy.schema import UniqueConstraint +from sqlalchemy_utils import UUIDType + +from superset import app, db, db_engine_specs +from superset.connectors.sqla.models import ADDITIVE_METRIC_TYPES +from superset.extensions import encrypted_field_factory, security_manager +from superset.sql_parse import ParsedQuery +from superset.utils.memoized import memoized + +# revision identifiers, used by Alembic. 
+revision = "b8d3a24d9131" +down_revision = "5afbb1a5849b" + +Base = declarative_base() +custom_password_store = app.config["SQLALCHEMY_CUSTOM_PASSWORD_STORE"] +DB_CONNECTION_MUTATOR = app.config["DB_CONNECTION_MUTATOR"] + + +class Database(Base): + + __tablename__ = "dbs" + __table_args__ = (UniqueConstraint("database_name"),) + + id = sa.Column(sa.Integer, primary_key=True) + database_name = sa.Column(sa.String(250), unique=True, nullable=False) + sqlalchemy_uri = sa.Column(sa.String(1024), nullable=False) + password = sa.Column(encrypted_field_factory.create(sa.String(1024))) + impersonate_user = sa.Column(sa.Boolean, default=False) + encrypted_extra = sa.Column(encrypted_field_factory.create(sa.Text), nullable=True) + extra = sa.Column( + sa.Text, + default=json.dumps( + dict( + metadata_params={}, + engine_params={}, + metadata_cache_timeout={}, + schemas_allowed_for_file_upload=[], + ) + ), + ) + server_cert = sa.Column(encrypted_field_factory.create(sa.Text), nullable=True) + + @property + def sqlalchemy_uri_decrypted(self) -> str: + try: + url = make_url(self.sqlalchemy_uri) + except (ArgumentError, ValueError): + return "dialect://invalid_uri" + if custom_password_store: + url.password = custom_password_store(url) + else: + url.password = self.password + return str(url) + + @property + def backend(self) -> str: + sqlalchemy_url = make_url(self.sqlalchemy_uri_decrypted) + return sqlalchemy_url.get_backend_name() # pylint: disable=no-member + + @classmethod + @memoized + def get_db_engine_spec_for_backend( + cls, backend: str + ) -> Type[db_engine_specs.BaseEngineSpec]: + engines = db_engine_specs.get_engine_specs() + return engines.get(backend, db_engine_specs.BaseEngineSpec) + + @property + def db_engine_spec(self) -> Type[db_engine_specs.BaseEngineSpec]: + return self.get_db_engine_spec_for_backend(self.backend) + + def get_extra(self) -> Dict[str, Any]: + return self.db_engine_spec.get_extra_params(self) + + def get_effective_user( + self, object_url: 
URL, user_name: Optional[str] = None, + ) -> Optional[str]: + effective_username = None + if self.impersonate_user: + effective_username = object_url.username + if user_name: + effective_username = user_name + + return effective_username + + def get_encrypted_extra(self) -> Dict[str, Any]: + return json.loads(self.encrypted_extra) if self.encrypted_extra else {} + + @memoized(watch=("impersonate_user", "sqlalchemy_uri_decrypted", "extra")) + def get_sqla_engine(self, schema: Optional[str] = None) -> Engine: + extra = self.get_extra() + sqlalchemy_url = make_url(self.sqlalchemy_uri_decrypted) + self.db_engine_spec.adjust_database_uri(sqlalchemy_url, schema) + effective_username = self.get_effective_user(sqlalchemy_url, "admin") + # If using MySQL or Presto for example, will set url.username + self.db_engine_spec.modify_url_for_impersonation( + sqlalchemy_url, self.impersonate_user, effective_username + ) + + params = extra.get("engine_params", {}) + connect_args = params.get("connect_args", {}) + if self.impersonate_user: + self.db_engine_spec.update_impersonation_config( + connect_args, str(sqlalchemy_url), effective_username + ) + + if connect_args: + params["connect_args"] = connect_args + + params.update(self.get_encrypted_extra()) + + if DB_CONNECTION_MUTATOR: + sqlalchemy_url, params = DB_CONNECTION_MUTATOR( + sqlalchemy_url, + params, + effective_username, + security_manager, + "migration", + ) + + return create_engine(sqlalchemy_url, **params) + + +class TableColumn(Base): + + __tablename__ = "table_columns" + __table_args__ = (UniqueConstraint("table_id", "column_name"),) + + id = sa.Column(sa.Integer, primary_key=True) + table_id = sa.Column(sa.Integer, sa.ForeignKey("tables.id")) + is_active = sa.Column(sa.Boolean, default=True) + extra = sa.Column(sa.Text) + column_name = sa.Column(sa.String(255), nullable=False) + type = sa.Column(sa.String(32)) + expression = sa.Column(sa.Text) + description = sa.Column(sa.Text) + is_dttm = sa.Column(sa.Boolean, 
default=False) + filterable = sa.Column(sa.Boolean, default=True) + groupby = sa.Column(sa.Boolean, default=True) + verbose_name = sa.Column(sa.String(1024)) + python_date_format = sa.Column(sa.String(255)) + + +class SqlMetric(Base): + + __tablename__ = "sql_metrics" + __table_args__ = (UniqueConstraint("table_id", "metric_name"),) + + id = sa.Column(sa.Integer, primary_key=True) + table_id = sa.Column(sa.Integer, sa.ForeignKey("tables.id")) + extra = sa.Column(sa.Text) + metric_type = sa.Column(sa.String(32)) + metric_name = sa.Column(sa.String(255), nullable=False) + expression = sa.Column(sa.Text, nullable=False) + warning_text = sa.Column(sa.Text) + description = sa.Column(sa.Text) + d3format = sa.Column(sa.String(128)) + verbose_name = sa.Column(sa.String(1024)) + + +class SqlaTable(Base): + + __tablename__ = "tables" + __table_args__ = (UniqueConstraint("database_id", "schema", "table_name"),) + + def fetch_columns_and_metrics(self, session: Session) -> None: + self.columns = session.query(TableColumn).filter( + TableColumn.table_id == self.id + ) + self.metrics = session.query(SqlMetric).filter(TableColumn.table_id == self.id) + + id = sa.Column(sa.Integer, primary_key=True) + columns: List[TableColumn] = [] + column_class = TableColumn + metrics: List[SqlMetric] = [] + metric_class = SqlMetric + + database_id = sa.Column(sa.Integer, sa.ForeignKey("dbs.id"), nullable=False) + database: Database = relationship( + "Database", + backref=backref("tables", cascade="all, delete-orphan"), + foreign_keys=[database_id], + ) + schema = sa.Column(sa.String(255)) + table_name = sa.Column(sa.String(250), nullable=False) + sql = sa.Column(sa.Text) + is_managed_externally = sa.Column(sa.Boolean, nullable=False, default=False) + external_url = sa.Column(sa.Text, nullable=True) + + +table_column_association_table = sa.Table( + "sl_table_columns", + Base.metadata, + sa.Column("table_id", sa.ForeignKey("sl_tables.id")), + sa.Column("column_id", 
sa.ForeignKey("sl_columns.id")), +) + +dataset_column_association_table = sa.Table( + "sl_dataset_columns", + Base.metadata, + sa.Column("dataset_id", sa.ForeignKey("sl_datasets.id")), + sa.Column("column_id", sa.ForeignKey("sl_columns.id")), +) + +dataset_table_association_table = sa.Table( + "sl_dataset_tables", + Base.metadata, + sa.Column("dataset_id", sa.ForeignKey("sl_datasets.id")), + sa.Column("table_id", sa.ForeignKey("sl_tables.id")), +) + + +class NewColumn(Base): + + __tablename__ = "sl_columns" + + id = sa.Column(sa.Integer, primary_key=True) + name = sa.Column(sa.Text) + type = sa.Column(sa.Text) + expression = sa.Column(sa.Text) + is_physical = sa.Column(sa.Boolean, default=True) + description = sa.Column(sa.Text) + warning_text = sa.Column(sa.Text) + is_temporal = sa.Column(sa.Boolean, default=False) + is_aggregation = sa.Column(sa.Boolean, default=False) + is_additive = sa.Column(sa.Boolean, default=False) + is_spatial = sa.Column(sa.Boolean, default=False) + is_partition = sa.Column(sa.Boolean, default=False) + is_increase_desired = sa.Column(sa.Boolean, default=True) + is_managed_externally = sa.Column(sa.Boolean, nullable=False, default=False) + external_url = sa.Column(sa.Text, nullable=True) + extra_json = sa.Column(sa.Text, default="{}") + + +class NewTable(Base): + + __tablename__ = "sl_tables" + __table_args__ = (UniqueConstraint("database_id", "catalog", "schema", "name"),) + + id = sa.Column(sa.Integer, primary_key=True) + name = sa.Column(sa.Text) + schema = sa.Column(sa.Text) + catalog = sa.Column(sa.Text) + database_id = sa.Column(sa.Integer, sa.ForeignKey("dbs.id"), nullable=False) + database: Database = relationship( + "Database", + backref=backref("new_tables", cascade="all, delete-orphan"), + foreign_keys=[database_id], + ) + columns: List[NewColumn] = relationship( + "NewColumn", secondary=table_column_association_table, cascade="all, delete" + ) + is_managed_externally = sa.Column(sa.Boolean, nullable=False, default=False) + 
external_url = sa.Column(sa.Text, nullable=True) + + +class NewDataset(Base): + + __tablename__ = "sl_datasets" + + id = sa.Column(sa.Integer, primary_key=True) + sqlatable_id = sa.Column(sa.Integer, nullable=True, unique=True) + name = sa.Column(sa.Text) + expression = sa.Column(sa.Text) + tables: List[NewTable] = relationship( + "NewTable", secondary=dataset_table_association_table + ) + columns: List[NewColumn] = relationship( + "NewColumn", secondary=dataset_column_association_table, cascade="all, delete" + ) + is_physical = sa.Column(sa.Boolean, default=False) + is_managed_externally = sa.Column(sa.Boolean, nullable=False, default=False) + external_url = sa.Column(sa.Text, nullable=True) + + +def after_insert(target: SqlaTable) -> None: # pylint: disable=too-many-locals + """ + Copy old datasets to the new models. + """ + session = inspect(target).session + + # get DB-specific conditional quoter for expressions that point to columns or + # table names + database = ( + target.database + or session.query(Database).filter_by(id=target.database_id).one() + ) + engine = database.get_sqla_engine(schema=target.schema) + conditional_quote = engine.dialect.identifier_preparer.quote + + # create columns + columns = [] + for column in target.columns: + # ``is_active`` might be ``None`` at this point, but it defaults to ``True``. 
+ if column.is_active is False: + continue + + extra_json = json.loads(column.extra or "{}") + for attr in {"groupby", "filterable", "verbose_name", "python_date_format"}: + value = getattr(column, attr) + if value: + extra_json[attr] = value + + columns.append( + NewColumn( + name=column.column_name, + type=column.type or "Unknown", + expression=column.expression or conditional_quote(column.column_name), + description=column.description, + is_temporal=column.is_dttm, + is_aggregation=False, + is_physical=column.expression is None or column.expression == "", + is_spatial=False, + is_partition=False, + is_increase_desired=True, + extra_json=json.dumps(extra_json) if extra_json else None, + is_managed_externally=target.is_managed_externally, + external_url=target.external_url, + ), + ) + + # create metrics + for metric in target.metrics: + extra_json = json.loads(metric.extra or "{}") + for attr in {"verbose_name", "metric_type", "d3format"}: + value = getattr(metric, attr) + if value: + extra_json[attr] = value + + is_additive = ( + metric.metric_type and metric.metric_type.lower() in ADDITIVE_METRIC_TYPES + ) + + columns.append( + NewColumn( + name=metric.metric_name, + type="Unknown", # figuring this out would require a type inferrer + expression=metric.expression, + warning_text=metric.warning_text, + description=metric.description, + is_aggregation=True, + is_additive=is_additive, + is_physical=False, + is_spatial=False, + is_partition=False, + is_increase_desired=True, + extra_json=json.dumps(extra_json) if extra_json else None, + is_managed_externally=target.is_managed_externally, + external_url=target.external_url, + ), + ) + + # physical dataset + tables = [] + if target.sql is None: + physical_columns = [column for column in columns if column.is_physical] + + # create table + table = NewTable( + name=target.table_name, + schema=target.schema, + catalog=None, # currently not supported + database_id=target.database_id, + columns=physical_columns, + 
is_managed_externally=target.is_managed_externally, + external_url=target.external_url, + ) + tables.append(table) + + # virtual dataset + else: + # mark all columns as virtual (not physical) + for column in columns: + column.is_physical = False + + # find referenced tables + parsed = ParsedQuery(target.sql) + referenced_tables = parsed.tables + + # predicate for finding the referenced tables + predicate = or_( + *[ + and_( + NewTable.schema == (table.schema or target.schema), + NewTable.name == table.table, + ) + for table in referenced_tables + ] + ) + tables = session.query(NewTable).filter(predicate).all() + + # create the new dataset + dataset = NewDataset( + sqlatable_id=target.id, + name=target.table_name, + expression=target.sql or conditional_quote(target.table_name), + tables=tables, + columns=columns, + is_physical=target.sql is None, + is_managed_externally=target.is_managed_externally, + external_url=target.external_url, + ) + session.add(dataset) + + +def upgrade(): + # Create tables for the new models. 
+ op.create_table( + "sl_columns", + # AuditMixinNullable + sa.Column("created_on", sa.DateTime(), nullable=True), + sa.Column("changed_on", sa.DateTime(), nullable=True), + sa.Column("created_by_fk", sa.Integer(), nullable=True), + sa.Column("changed_by_fk", sa.Integer(), nullable=True), + # ExtraJSONMixin + sa.Column("extra_json", sa.Text(), nullable=True), + # ImportExportMixin + sa.Column("uuid", UUIDType(binary=True), primary_key=False, default=uuid4), + # Column + sa.Column("id", sa.INTEGER(), autoincrement=True, nullable=False), + sa.Column("name", sa.TEXT(), nullable=False), + sa.Column("type", sa.TEXT(), nullable=False), + sa.Column("expression", sa.TEXT(), nullable=False), + sa.Column("is_physical", sa.BOOLEAN(), nullable=False, default=True,), + sa.Column("description", sa.TEXT(), nullable=True), + sa.Column("warning_text", sa.TEXT(), nullable=True), + sa.Column("unit", sa.TEXT(), nullable=True), + sa.Column("is_temporal", sa.BOOLEAN(), nullable=False), + sa.Column("is_spatial", sa.BOOLEAN(), nullable=False, default=False,), + sa.Column("is_partition", sa.BOOLEAN(), nullable=False, default=False,), + sa.Column("is_aggregation", sa.BOOLEAN(), nullable=False, default=False,), + sa.Column("is_additive", sa.BOOLEAN(), nullable=False, default=False,), + sa.Column("is_increase_desired", sa.BOOLEAN(), nullable=False, default=True,), + sa.Column( + "is_managed_externally", + sa.Boolean(), + nullable=False, + server_default=sa.false(), + ), + sa.Column("external_url", sa.Text(), nullable=True), + sa.PrimaryKeyConstraint("id"), + ) + with op.batch_alter_table("sl_columns") as batch_op: + batch_op.create_unique_constraint("uq_sl_columns_uuid", ["uuid"]) + + op.create_table( + "sl_tables", + # AuditMixinNullable + sa.Column("created_on", sa.DateTime(), nullable=True), + sa.Column("changed_on", sa.DateTime(), nullable=True), + sa.Column("created_by_fk", sa.Integer(), nullable=True), + sa.Column("changed_by_fk", sa.Integer(), nullable=True), + # ExtraJSONMixin + 
sa.Column("extra_json", sa.Text(), nullable=True), + # ImportExportMixin + sa.Column("uuid", UUIDType(binary=True), primary_key=False, default=uuid4), + # Table + sa.Column("id", sa.INTEGER(), autoincrement=True, nullable=False), + sa.Column("database_id", sa.INTEGER(), autoincrement=False, nullable=False), + sa.Column("catalog", sa.TEXT(), nullable=True), + sa.Column("schema", sa.TEXT(), nullable=True), + sa.Column("name", sa.TEXT(), nullable=False), + sa.Column( + "is_managed_externally", + sa.Boolean(), + nullable=False, + server_default=sa.false(), + ), + sa.Column("external_url", sa.Text(), nullable=True), + sa.ForeignKeyConstraint(["database_id"], ["dbs.id"], name="sl_tables_ibfk_1"), + sa.PrimaryKeyConstraint("id"), + ) + with op.batch_alter_table("sl_tables") as batch_op: + batch_op.create_unique_constraint("uq_sl_tables_uuid", ["uuid"]) + + op.create_table( + "sl_table_columns", + sa.Column("table_id", sa.INTEGER(), autoincrement=False, nullable=False), + sa.Column("column_id", sa.INTEGER(), autoincrement=False, nullable=False), + sa.ForeignKeyConstraint( + ["column_id"], ["sl_columns.id"], name="sl_table_columns_ibfk_2" + ), + sa.ForeignKeyConstraint( + ["table_id"], ["sl_tables.id"], name="sl_table_columns_ibfk_1" + ), + ) + + op.create_table( + "sl_datasets", + # AuditMixinNullable + sa.Column("created_on", sa.DateTime(), nullable=True), + sa.Column("changed_on", sa.DateTime(), nullable=True), + sa.Column("created_by_fk", sa.Integer(), nullable=True), + sa.Column("changed_by_fk", sa.Integer(), nullable=True), + # ExtraJSONMixin + sa.Column("extra_json", sa.Text(), nullable=True), + # ImportExportMixin + sa.Column("uuid", UUIDType(binary=True), primary_key=False, default=uuid4), + # Dataset + sa.Column("id", sa.INTEGER(), autoincrement=True, nullable=False), + sa.Column("sqlatable_id", sa.INTEGER(), nullable=True), + sa.Column("name", sa.TEXT(), nullable=False), + sa.Column("expression", sa.TEXT(), nullable=False), + sa.Column("is_physical", 
sa.BOOLEAN(), nullable=False, default=False,), + sa.Column( + "is_managed_externally", + sa.Boolean(), + nullable=False, + server_default=sa.false(), + ), + sa.Column("external_url", sa.Text(), nullable=True), + sa.PrimaryKeyConstraint("id"), + ) + with op.batch_alter_table("sl_datasets") as batch_op: + batch_op.create_unique_constraint("uq_sl_datasets_uuid", ["uuid"]) + batch_op.create_unique_constraint( + "uq_sl_datasets_sqlatable_id", ["sqlatable_id"] + ) + + op.create_table( + "sl_dataset_columns", + sa.Column("dataset_id", sa.INTEGER(), autoincrement=False, nullable=False), + sa.Column("column_id", sa.INTEGER(), autoincrement=False, nullable=False), + sa.ForeignKeyConstraint( + ["column_id"], ["sl_columns.id"], name="sl_dataset_columns_ibfk_2" + ), + sa.ForeignKeyConstraint( + ["dataset_id"], ["sl_datasets.id"], name="sl_dataset_columns_ibfk_1" + ), + ) + + op.create_table( + "sl_dataset_tables", + sa.Column("dataset_id", sa.INTEGER(), autoincrement=False, nullable=False), + sa.Column("table_id", sa.INTEGER(), autoincrement=False, nullable=False), + sa.ForeignKeyConstraint( + ["dataset_id"], ["sl_datasets.id"], name="sl_dataset_tables_ibfk_1" + ), + sa.ForeignKeyConstraint( + ["table_id"], ["sl_tables.id"], name="sl_dataset_tables_ibfk_2" + ), + ) + + # migrate existing datasets to the new models + bind = op.get_bind() + session = db.Session(bind=bind) # pylint: disable=no-member + + datasets = session.query(SqlaTable).all() + for dataset in datasets: + dataset.fetch_columns_and_metrics(session) + after_insert(target=dataset) + + +def downgrade(): + op.drop_table("sl_dataset_columns") + op.drop_table("sl_dataset_tables") + op.drop_table("sl_datasets") + op.drop_table("sl_table_columns") + op.drop_table("sl_tables") + op.drop_table("sl_columns") diff --git a/superset/models/helpers.py b/superset/models/helpers.py index be02997a21633..f1adadfbc453f 100644 --- a/superset/models/helpers.py +++ b/superset/models/helpers.py @@ -29,6 +29,7 @@ import sqlalchemy as sa 
import yaml from flask import escape, g, Markup +from flask_appbuilder import Model from flask_appbuilder.models.decorators import renders from flask_appbuilder.models.mixins import AuditMixin from flask_appbuilder.security.sqla.models import User @@ -510,3 +511,22 @@ def certification_details(self) -> Optional[str]: @property def warning_markdown(self) -> Optional[str]: return self.get_extra_dict().get("warning_markdown") + + +def clone_model( + target: Model, ignore: Optional[List[str]] = None, **kwargs: Any +) -> Model: + """ + Clone a SQLAlchemy model. + """ + ignore = ignore or [] + + table = target.__table__ + data = { + attr: getattr(target, attr) + for attr in table.columns.keys() + if attr not in table.primary_key.columns.keys() and attr not in ignore + } + data.update(kwargs) + + return target.__class__(**data) diff --git a/superset/security/manager.py b/superset/security/manager.py index 91b203e83f774..d3cae1f4d01b1 100644 --- a/superset/security/manager.py +++ b/superset/security/manager.py @@ -950,6 +950,7 @@ def set_perm( # pylint: disable=unused-argument .where(link_table.c.id == target.id) .values(perm=target.get_perm()) ) + target.perm = target.get_perm() if ( hasattr(target, "schema_perm") @@ -960,6 +961,7 @@ def set_perm( # pylint: disable=unused-argument .where(link_table.c.id == target.id) .values(schema_perm=target.get_schema_perm()) ) + target.schema_perm = target.get_schema_perm() pvm_names = [] if target.__tablename__ in {"dbs", "clusters"}: diff --git a/superset/tables/__init__.py b/superset/tables/__init__.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/superset/tables/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. 
The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. diff --git a/superset/tables/models.py b/superset/tables/models.py new file mode 100644 index 0000000000000..e2489445c686b --- /dev/null +++ b/superset/tables/models.py @@ -0,0 +1,92 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +""" +Table model. + +This model was introduced in SIP-68 (https://github.com/apache/superset/issues/14909), +and represents a "table" in a given database -- either a physical table or a view. In +addition to a table, new models for columns, metrics, and datasets were also introduced. + +These models are not fully implemented, and shouldn't be used yet. 
+""" + +from typing import List + +import sqlalchemy as sa +from flask_appbuilder import Model +from sqlalchemy.orm import backref, relationship +from sqlalchemy.schema import UniqueConstraint + +from superset.columns.models import Column +from superset.models.core import Database +from superset.models.helpers import ( + AuditMixinNullable, + ExtraJSONMixin, + ImportExportMixin, +) + +association_table = sa.Table( + "sl_table_columns", + Model.metadata, # pylint: disable=no-member + sa.Column("table_id", sa.ForeignKey("sl_tables.id")), + sa.Column("column_id", sa.ForeignKey("sl_columns.id")), +) + + +class Table(Model, AuditMixinNullable, ExtraJSONMixin, ImportExportMixin): + """ + A table/view in a database. + """ + + __tablename__ = "sl_tables" + + # Note this uniqueness constraint is not part of the physical schema, i.e., it does + # not exist in the migrations. The reason it does not physically exist is MySQL, + # PostgreSQL, etc. have a different interpretation of uniqueness when it comes to NULL + # which is problematic given the catalog and schema are optional. + __table_args__ = (UniqueConstraint("database_id", "catalog", "schema", "name"),) + + id = sa.Column(sa.Integer, primary_key=True) + + database_id = sa.Column(sa.Integer, sa.ForeignKey("dbs.id"), nullable=False) + database: Database = relationship( + "Database", + # TODO (betodealmeida): rename the backref to ``tables`` once we get rid of the + # old models. + backref=backref("new_tables", cascade="all, delete-orphan"), + foreign_keys=[database_id], + ) + + # We use ``sa.Text`` for these attributes because (1) in modern databases the + # performance is the same as ``VARCHAR``[1] and (2) because some table names can be + # **really** long (eg, Google Sheets URLs). 
+ # + # [1] https://www.postgresql.org/docs/9.1/datatype-character.html + catalog = sa.Column(sa.Text) + schema = sa.Column(sa.Text) + name = sa.Column(sa.Text) + + # The relationship between tables and columns is 1:n, but we use a many-to-many + # association to differentiate between the relationship between datasets and + # columns. + columns: List[Column] = relationship( + "Column", secondary=association_table, cascade="all, delete" + ) + + # Table is managed externally and should be read-only inside Superset + is_managed_externally = sa.Column(sa.Boolean, nullable=False, default=False) + external_url = sa.Column(sa.Text, nullable=True) diff --git a/superset/tables/schemas.py b/superset/tables/schemas.py new file mode 100644 index 0000000000000..701a1359ba003 --- /dev/null +++ b/superset/tables/schemas.py @@ -0,0 +1,40 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +""" +Schema for table model. + +This model was introduced in SIP-68 (https://github.com/apache/superset/issues/14909), +and represents a "table" in a given database -- either a physical table or a view. In +addition to a table, new models for columns, metrics, and datasets were also introduced. + +These models are not fully implemented, and shouldn't be used yet. 
+""" + +from marshmallow_sqlalchemy import SQLAlchemyAutoSchema + +from superset.tables.models import Table + + +class TableSchema(SQLAlchemyAutoSchema): + """ + Schema for the ``Table`` model. + """ + + class Meta: # pylint: disable=too-few-public-methods + model = Table + load_instance = True + include_relationships = True diff --git a/tests/integration_tests/dashboards/filter_sets/conftest.py b/tests/integration_tests/dashboards/filter_sets/conftest.py index d07f869f6a551..b7a28273b0a7e 100644 --- a/tests/integration_tests/dashboards/filter_sets/conftest.py +++ b/tests/integration_tests/dashboards/filter_sets/conftest.py @@ -21,7 +21,7 @@ import pytest -from superset import security_manager as sm +from superset import db, security_manager as sm from superset.dashboards.filter_sets.consts import ( DESCRIPTION_FIELD, JSON_METADATA_FIELD, @@ -66,20 +66,6 @@ security_manager: BaseSecurityManager = sm -# @pytest.fixture(autouse=True, scope="session") -# def setup_sample_data() -> Any: -# pass - - -@pytest.fixture(autouse=True) -def expire_on_commit_true() -> Generator[None, None, None]: - ctx: AppContext - with app.app_context() as ctx: - ctx.app.appbuilder.get_session.configure(expire_on_commit=False) - yield - ctx.app.appbuilder.get_session.configure(expire_on_commit=True) - - @pytest.fixture(autouse=True, scope="module") def test_users() -> Generator[Dict[str, int], None, None]: usernames = [ @@ -92,17 +78,14 @@ def test_users() -> Generator[Dict[str, int], None, None]: filter_set_role = build_filter_set_role() admin_role: Role = security_manager.find_role("Admin") usernames_to_ids = create_test_users(admin_role, filter_set_role, usernames) - yield usernames_to_ids - ctx: AppContext - delete_users(usernames_to_ids) + yield usernames_to_ids + delete_users(usernames_to_ids) def delete_users(usernames_to_ids: Dict[str, int]) -> None: - with app.app_context() as ctx: - session: Session = ctx.app.appbuilder.get_session - for username in usernames_to_ids.keys(): - 
session.delete(security_manager.find_user(username)) - session.commit() + for username in usernames_to_ids.keys(): + db.session.delete(security_manager.find_user(username)) + db.session.commit() def create_test_users( @@ -150,106 +133,86 @@ def client() -> Generator[FlaskClient[Any], None, None]: @pytest.fixture -def dashboard() -> Generator[Dashboard, None, None]: - dashboard: Dashboard - slice_: Slice - datasource: SqlaTable - database: Database - session: Session - try: - with app.app_context() as ctx: - dashboard_owner_user = security_manager.find_user(DASHBOARD_OWNER_USERNAME) - database = create_database("test_database_filter_sets") - datasource = create_datasource_table( - name="test_datasource", database=database, owners=[dashboard_owner_user] - ) - slice_ = create_slice( - datasource=datasource, name="test_slice", owners=[dashboard_owner_user] - ) - dashboard = create_dashboard( - dashboard_title="test_dashboard", - published=True, - slices=[slice_], - owners=[dashboard_owner_user], - ) - session = ctx.app.appbuilder.get_session - session.add(dashboard) - session.commit() - yield dashboard - except Exception as ex: - print(str(ex)) - finally: - with app.app_context() as ctx: - session = ctx.app.appbuilder.get_session - try: - dashboard.owners = [] - slice_.owners = [] - datasource.owners = [] - session.merge(dashboard) - session.merge(slice_) - session.merge(datasource) - session.commit() - session.delete(dashboard) - session.delete(slice_) - session.delete(datasource) - session.delete(database) - session.commit() - except Exception as ex: - print(str(ex)) +def dashboard(app_context) -> Generator[Dashboard, None, None]: + dashboard_owner_user = security_manager.find_user(DASHBOARD_OWNER_USERNAME) + database = create_database("test_database_filter_sets") + datasource = create_datasource_table( + name="test_datasource", database=database, owners=[dashboard_owner_user] + ) + slice_ = create_slice( + datasource=datasource, name="test_slice", 
owners=[dashboard_owner_user] + ) + dashboard = create_dashboard( + dashboard_title="test_dashboard", + published=True, + slices=[slice_], + owners=[dashboard_owner_user], + ) + db.session.add(dashboard) + db.session.commit() + + yield dashboard + + db.session.delete(dashboard) + db.session.delete(slice_) + db.session.delete(datasource) + db.session.delete(database) + db.session.commit() @pytest.fixture -def dashboard_id(dashboard) -> int: - return dashboard.id +def dashboard_id(dashboard: Dashboard) -> Generator[int, None, None]: + yield dashboard.id @pytest.fixture def filtersets( dashboard_id: int, test_users: Dict[str, int], dumped_valid_json_metadata: str ) -> Generator[Dict[str, List[FilterSet]], None, None]: - try: - with app.app_context() as ctx: - session: Session = ctx.app.appbuilder.get_session - first_filter_set = FilterSet( - name="filter_set_1_of_" + str(dashboard_id), - dashboard_id=dashboard_id, - json_metadata=dumped_valid_json_metadata, - owner_id=dashboard_id, - owner_type="Dashboard", - ) - second_filter_set = FilterSet( - name="filter_set_2_of_" + str(dashboard_id), - json_metadata=dumped_valid_json_metadata, - dashboard_id=dashboard_id, - owner_id=dashboard_id, - owner_type="Dashboard", - ) - third_filter_set = FilterSet( - name="filter_set_3_of_" + str(dashboard_id), - json_metadata=dumped_valid_json_metadata, - dashboard_id=dashboard_id, - owner_id=test_users[FILTER_SET_OWNER_USERNAME], - owner_type="User", - ) - forth_filter_set = FilterSet( - name="filter_set_4_of_" + str(dashboard_id), - json_metadata=dumped_valid_json_metadata, - dashboard_id=dashboard_id, - owner_id=test_users[FILTER_SET_OWNER_USERNAME], - owner_type="User", - ) - session.add(first_filter_set) - session.add(second_filter_set) - session.add(third_filter_set) - session.add(forth_filter_set) - session.commit() - yv = { - "Dashboard": [first_filter_set, second_filter_set], - FILTER_SET_OWNER_USERNAME: [third_filter_set, forth_filter_set], - } - yield yv - except Exception 
as ex: - print(str(ex)) + first_filter_set = FilterSet( + name="filter_set_1_of_" + str(dashboard_id), + dashboard_id=dashboard_id, + json_metadata=dumped_valid_json_metadata, + owner_id=dashboard_id, + owner_type="Dashboard", + ) + second_filter_set = FilterSet( + name="filter_set_2_of_" + str(dashboard_id), + json_metadata=dumped_valid_json_metadata, + dashboard_id=dashboard_id, + owner_id=dashboard_id, + owner_type="Dashboard", + ) + third_filter_set = FilterSet( + name="filter_set_3_of_" + str(dashboard_id), + json_metadata=dumped_valid_json_metadata, + dashboard_id=dashboard_id, + owner_id=test_users[FILTER_SET_OWNER_USERNAME], + owner_type="User", + ) + fourth_filter_set = FilterSet( + name="filter_set_4_of_" + str(dashboard_id), + json_metadata=dumped_valid_json_metadata, + dashboard_id=dashboard_id, + owner_id=test_users[FILTER_SET_OWNER_USERNAME], + owner_type="User", + ) + db.session.add(first_filter_set) + db.session.add(second_filter_set) + db.session.add(third_filter_set) + db.session.add(fourth_filter_set) + db.session.commit() + + yield { + "Dashboard": [first_filter_set, second_filter_set], + FILTER_SET_OWNER_USERNAME: [third_filter_set, fourth_filter_set], + } + + db.session.delete(first_filter_set) + db.session.delete(second_filter_set) + db.session.delete(third_filter_set) + db.session.delete(fourth_filter_set) + db.session.commit() @pytest.fixture @@ -299,8 +262,8 @@ def valid_filter_set_data_for_update( @pytest.fixture -def not_exists_dashboard(dashboard_id: int) -> int: - return dashboard_id + 1 +def not_exists_dashboard_id(dashboard_id: int) -> Generator[int, None, None]: + yield dashboard_id + 1 @pytest.fixture diff --git a/tests/integration_tests/dashboards/filter_sets/create_api_tests.py b/tests/integration_tests/dashboards/filter_sets/create_api_tests.py index cbdaef9b95a01..fcd2923fb8696 100644 --- a/tests/integration_tests/dashboards/filter_sets/create_api_tests.py +++ b/tests/integration_tests/dashboards/filter_sets/create_api_tests.py 
@@ -94,14 +94,14 @@ def test_with_id_field__400( def test_with_dashboard_not_exists__404( self, - not_exists_dashboard: int, + not_exists_dashboard_id: int, valid_filter_set_data_for_create: Dict[str, Any], client: FlaskClient[Any], ): # act login(client, "admin") response = call_create_filter_set( - client, not_exists_dashboard, valid_filter_set_data_for_create + client, not_exists_dashboard_id, valid_filter_set_data_for_create ) # assert diff --git a/tests/integration_tests/dashboards/filter_sets/delete_api_tests.py b/tests/integration_tests/dashboards/filter_sets/delete_api_tests.py index 34f52011d812b..8e7e0bcb6004e 100644 --- a/tests/integration_tests/dashboards/filter_sets/delete_api_tests.py +++ b/tests/integration_tests/dashboards/filter_sets/delete_api_tests.py @@ -61,7 +61,7 @@ def test_with_dashboard_exists_filterset_not_exists__200( def test_with_dashboard_not_exists_filterset_not_exists__404( self, - not_exists_dashboard: int, + not_exists_dashboard_id: int, filtersets: Dict[str, List[FilterSet]], client: FlaskClient[Any], ): @@ -70,14 +70,14 @@ def test_with_dashboard_not_exists_filterset_not_exists__404( filter_set_id = max(collect_all_ids(filtersets)) + 1 response = call_delete_filter_set( - client, {"id": filter_set_id}, not_exists_dashboard + client, {"id": filter_set_id}, not_exists_dashboard_id ) # assert assert response.status_code == 404 def test_with_dashboard_not_exists_filterset_exists__404( self, - not_exists_dashboard: int, + not_exists_dashboard_id: int, dashboard_based_filter_set_dict: Dict[str, Any], client: FlaskClient[Any], ): @@ -86,7 +86,7 @@ def test_with_dashboard_not_exists_filterset_exists__404( # act response = call_delete_filter_set( - client, dashboard_based_filter_set_dict, not_exists_dashboard + client, dashboard_based_filter_set_dict, not_exists_dashboard_id ) # assert assert response.status_code == 404 diff --git a/tests/integration_tests/dashboards/filter_sets/get_api_tests.py 
b/tests/integration_tests/dashboards/filter_sets/get_api_tests.py index a1ad55aa2a8bd..0d36a0a593e5b 100644 --- a/tests/integration_tests/dashboards/filter_sets/get_api_tests.py +++ b/tests/integration_tests/dashboards/filter_sets/get_api_tests.py @@ -37,13 +37,13 @@ class TestGetFilterSetsApi: def test_with_dashboard_not_exists__404( - self, not_exists_dashboard: int, client: FlaskClient[Any], + self, not_exists_dashboard_id: int, client: FlaskClient[Any], ): # arrange login(client, "admin") # act - response = call_get_filter_sets(client, not_exists_dashboard) + response = call_get_filter_sets(client, not_exists_dashboard_id) # assert assert response.status_code == 404 diff --git a/tests/integration_tests/dashboards/filter_sets/update_api_tests.py b/tests/integration_tests/dashboards/filter_sets/update_api_tests.py index ccd5ae83afba1..4096e100994f8 100644 --- a/tests/integration_tests/dashboards/filter_sets/update_api_tests.py +++ b/tests/integration_tests/dashboards/filter_sets/update_api_tests.py @@ -85,7 +85,7 @@ def test_with_dashboard_exists_filterset_not_exists__404( def test_with_dashboard_not_exists_filterset_not_exists__404( self, - not_exists_dashboard: int, + not_exists_dashboard_id: int, filtersets: Dict[str, List[FilterSet]], client: FlaskClient[Any], ): @@ -94,14 +94,14 @@ def test_with_dashboard_not_exists_filterset_not_exists__404( filter_set_id = max(collect_all_ids(filtersets)) + 1 response = call_update_filter_set( - client, {"id": filter_set_id}, {}, not_exists_dashboard + client, {"id": filter_set_id}, {}, not_exists_dashboard_id ) # assert assert response.status_code == 404 def test_with_dashboard_not_exists_filterset_exists__404( self, - not_exists_dashboard: int, + not_exists_dashboard_id: int, dashboard_based_filter_set_dict: Dict[str, Any], client: FlaskClient[Any], ): @@ -110,7 +110,7 @@ def test_with_dashboard_not_exists_filterset_exists__404( # act response = call_update_filter_set( - client, dashboard_based_filter_set_dict, {}, 
not_exists_dashboard + client, dashboard_based_filter_set_dict, {}, not_exists_dashboard_id ) # assert assert response.status_code == 404 diff --git a/tests/integration_tests/dashboards/superset_factory_util.py b/tests/integration_tests/dashboards/superset_factory_util.py index b67c60ca0736f..b160a56a33fbf 100644 --- a/tests/integration_tests/dashboards/superset_factory_util.py +++ b/tests/integration_tests/dashboards/superset_factory_util.py @@ -20,7 +20,7 @@ from flask_appbuilder import Model from flask_appbuilder.security.sqla.models import User -from superset import appbuilder +from superset import db from superset.connectors.sqla.models import SqlaTable, sqlatable_user from superset.models.core import Database from superset.models.dashboard import ( @@ -38,7 +38,7 @@ logger = logging.getLogger(__name__) -session = appbuilder.get_session +session = db.session inserted_dashboards_ids = [] inserted_databases_ids = [] @@ -192,9 +192,11 @@ def delete_all_inserted_objects() -> None: def delete_all_inserted_dashboards(): try: - dashboards_to_delete: List[Dashboard] = session.query(Dashboard).filter( - Dashboard.id.in_(inserted_dashboards_ids) - ).all() + dashboards_to_delete: List[Dashboard] = ( + session.query(Dashboard) + .filter(Dashboard.id.in_(inserted_dashboards_ids)) + .all() + ) for dashboard in dashboards_to_delete: try: delete_dashboard(dashboard, False) @@ -239,9 +241,9 @@ def delete_dashboard_slices_associations(dashboard: Dashboard) -> None: def delete_all_inserted_slices(): try: - slices_to_delete: List[Slice] = session.query(Slice).filter( - Slice.id.in_(inserted_slices_ids) - ).all() + slices_to_delete: List[Slice] = ( + session.query(Slice).filter(Slice.id.in_(inserted_slices_ids)).all() + ) for slice in slices_to_delete: try: delete_slice(slice, False) @@ -270,9 +272,11 @@ def delete_slice_users_associations(slice_: Slice) -> None: def delete_all_inserted_tables(): try: - tables_to_delete: List[SqlaTable] = session.query(SqlaTable).filter( - 
SqlaTable.id.in_(inserted_sqltables_ids) - ).all() + tables_to_delete: List[SqlaTable] = ( + session.query(SqlaTable) + .filter(SqlaTable.id.in_(inserted_sqltables_ids)) + .all() + ) for table in tables_to_delete: try: delete_sqltable(table, False) @@ -303,9 +307,11 @@ def delete_table_users_associations(table: SqlaTable) -> None: def delete_all_inserted_dbs(): try: - dbs_to_delete: List[Database] = session.query(Database).filter( - Database.id.in_(inserted_databases_ids) - ).all() + dbs_to_delete: List[Database] = ( + session.query(Database) + .filter(Database.id.in_(inserted_databases_ids)) + .all() + ) for db in dbs_to_delete: try: delete_database(db, False) diff --git a/tests/integration_tests/datasets/api_tests.py b/tests/integration_tests/datasets/api_tests.py index aaf76338ef01a..eeda824500fe6 100644 --- a/tests/integration_tests/datasets/api_tests.py +++ b/tests/integration_tests/datasets/api_tests.py @@ -500,6 +500,7 @@ def test_create_dataset_validate_uniqueness(self): "message": {"table_name": ["Dataset energy_usage already exists"]} } + @unittest.skip("test is failing stochastically") def test_create_dataset_same_name_different_schema(self): if backend() == "sqlite": # sqlite doesn't support schemas diff --git a/tests/integration_tests/security_tests.py b/tests/integration_tests/security_tests.py index f1534692bed86..b311612fcd2b4 100644 --- a/tests/integration_tests/security_tests.py +++ b/tests/integration_tests/security_tests.py @@ -52,6 +52,10 @@ public_role_like_gamma, public_role_like_test_role, ) +from tests.integration_tests.fixtures.birth_names_dashboard import ( + load_birth_names_dashboard_with_slices, + load_birth_names_data, +) from tests.integration_tests.fixtures.world_bank_dashboard import ( load_world_bank_dashboard_with_slices, load_world_bank_data, @@ -224,7 +228,7 @@ def test_set_perm_sqla_table(self): ) # database change - new_db = Database(sqlalchemy_uri="some_uri", database_name="tmp_db") + new_db = 
Database(sqlalchemy_uri="sqlite://", database_name="tmp_db") session.add(new_db) stored_table.database = ( session.query(Database).filter_by(database_name="tmp_db").one() @@ -358,9 +362,7 @@ def test_set_perm_druid_cluster(self): def test_set_perm_database(self): session = db.session - database = Database( - database_name="tmp_database", sqlalchemy_uri="sqlite://test" - ) + database = Database(database_name="tmp_database", sqlalchemy_uri="sqlite://") session.add(database) stored_db = ( @@ -411,9 +413,7 @@ def test_hybrid_perm_druid_cluster(self): db.session.commit() def test_hybrid_perm_database(self): - database = Database( - database_name="tmp_database3", sqlalchemy_uri="sqlite://test" - ) + database = Database(database_name="tmp_database3", sqlalchemy_uri="sqlite://") db.session.add(database) @@ -437,9 +437,7 @@ def test_hybrid_perm_database(self): def test_set_perm_slice(self): session = db.session - database = Database( - database_name="tmp_database", sqlalchemy_uri="sqlite://test" - ) + database = Database(database_name="tmp_database", sqlalchemy_uri="sqlite://") table = SqlaTable(table_name="tmp_perm_table", database=database) session.add(database) session.add(table) @@ -573,6 +571,7 @@ def test_gamma_user_schema_access_to_charts(self): ) # wb_health_population slice, has access self.assertNotIn("Girl Name Cloud", data) # birth_names slice, no access + @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices") @pytest.mark.usefixtures("public_role_like_gamma") def test_public_sync_role_data_perms(self): """ diff --git a/tests/unit_tests/columns/__init__.py b/tests/unit_tests/columns/__init__.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/tests/unit_tests/columns/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. 
The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. diff --git a/tests/unit_tests/columns/test_models.py b/tests/unit_tests/columns/test_models.py new file mode 100644 index 0000000000000..36c6b9b4e7301 --- /dev/null +++ b/tests/unit_tests/columns/test_models.py @@ -0,0 +1,53 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +# pylint: disable=import-outside-toplevel, unused-argument + +from sqlalchemy.orm.session import Session + + +def test_column_model(app_context: None, session: Session) -> None: + """ + Test basic attributes of a ``Column``. 
+ """ + from superset.columns.models import Column + + engine = session.get_bind() + Column.metadata.create_all(engine) # pylint: disable=no-member + + column = Column(name="ds", type="TIMESTAMP", expression="ds",) + + session.add(column) + session.flush() + + assert column.id == 1 + assert column.uuid is not None + + assert column.name == "ds" + assert column.type == "TIMESTAMP" + assert column.expression == "ds" + + # test that default values are set correctly + assert column.description is None + assert column.warning_text is None + assert column.unit is None + assert column.is_temporal is False + assert column.is_spatial is False + assert column.is_partition is False + assert column.is_aggregation is False + assert column.is_additive is False + assert column.is_increase_desired is True diff --git a/tests/unit_tests/datasets/commands/importers/v1/import_test.py b/tests/unit_tests/datasets/commands/importers/v1/import_test.py index e622c55c3bc27..0aa0f67a07690 100644 --- a/tests/unit_tests/datasets/commands/importers/v1/import_test.py +++ b/tests/unit_tests/datasets/commands/importers/v1/import_test.py @@ -22,15 +22,14 @@ from sqlalchemy.orm.session import Session -from superset.datasets.schemas import ImportV1DatasetSchema - -def test_import_(app_context: None, session: Session) -> None: +def test_import_dataset(app_context: None, session: Session) -> None: """ Test importing a dataset. 
""" from superset.connectors.sqla.models import SqlaTable, SqlMetric, TableColumn from superset.datasets.commands.importers.v1.utils import import_dataset + from superset.datasets.schemas import ImportV1DatasetSchema from superset.models.core import Database engine = session.get_bind() @@ -120,11 +119,11 @@ def test_import_(app_context: None, session: Session) -> None: assert len(sqla_table.columns) == 1 assert sqla_table.columns[0].column_name == "profit" assert sqla_table.columns[0].verbose_name is None - assert sqla_table.columns[0].is_dttm is False - assert sqla_table.columns[0].is_active is True + assert sqla_table.columns[0].is_dttm is None + assert sqla_table.columns[0].is_active is None assert sqla_table.columns[0].type == "INTEGER" - assert sqla_table.columns[0].groupby is True - assert sqla_table.columns[0].filterable is True + assert sqla_table.columns[0].groupby is None + assert sqla_table.columns[0].filterable is None assert sqla_table.columns[0].expression == "revenue-expenses" assert sqla_table.columns[0].description is None assert sqla_table.columns[0].python_date_format is None @@ -139,6 +138,7 @@ def test_import_column_extra_is_string(app_context: None, session: Session) -> N """ from superset.connectors.sqla.models import SqlaTable, SqlMetric, TableColumn from superset.datasets.commands.importers.v1.utils import import_dataset + from superset.datasets.schemas import ImportV1DatasetSchema from superset.models.core import Database engine = session.get_bind() diff --git a/tests/unit_tests/datasets/test_models.py b/tests/unit_tests/datasets/test_models.py new file mode 100644 index 0000000000000..eab0a8aa28288 --- /dev/null +++ b/tests/unit_tests/datasets/test_models.py @@ -0,0 +1,1244 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. 
The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +# pylint: disable=import-outside-toplevel, unused-argument, unused-import, too-many-locals, invalid-name, too-many-lines + +import json +from datetime import datetime, timezone + +from pytest_mock import MockFixture +from sqlalchemy.orm.session import Session + + +def test_dataset_model(app_context: None, session: Session) -> None: + """ + Test basic attributes of a ``Dataset``. + """ + from superset.columns.models import Column + from superset.datasets.models import Dataset + from superset.models.core import Database + from superset.tables.models import Table + + engine = session.get_bind() + Dataset.metadata.create_all(engine) # pylint: disable=no-member + + table = Table( + name="my_table", + schema="my_schema", + catalog="my_catalog", + database=Database(database_name="my_database", sqlalchemy_uri="sqlite://"), + columns=[ + Column(name="longitude", expression="longitude"), + Column(name="latitude", expression="latitude"), + ], + ) + session.add(table) + session.flush() + + dataset = Dataset( + name="positions", + expression=""" +SELECT array_agg(array[longitude,latitude]) AS position +FROM my_catalog.my_schema.my_table +""", + tables=[table], + columns=[ + Column(name="position", expression="array_agg(array[longitude,latitude])",), + ], + ) + session.add(dataset) + session.flush() + + assert dataset.id == 1 + assert dataset.uuid is not None + + assert dataset.name == "positions" + assert ( 
+ dataset.expression + == """ +SELECT array_agg(array[longitude,latitude]) AS position +FROM my_catalog.my_schema.my_table +""" + ) + + assert [table.name for table in dataset.tables] == ["my_table"] + assert [column.name for column in dataset.columns] == ["position"] + + +def test_cascade_delete_table(app_context: None, session: Session) -> None: + """ + Test that deleting ``Table`` also deletes its columns. + """ + from superset.columns.models import Column + from superset.models.core import Database + from superset.tables.models import Table + + engine = session.get_bind() + Table.metadata.create_all(engine) # pylint: disable=no-member + + table = Table( + name="my_table", + schema="my_schema", + catalog="my_catalog", + database=Database(database_name="my_database", sqlalchemy_uri="sqlite://"), + columns=[ + Column(name="longitude", expression="longitude"), + Column(name="latitude", expression="latitude"), + ], + ) + session.add(table) + session.flush() + + columns = session.query(Column).all() + assert len(columns) == 2 + + session.delete(table) + session.flush() + + # test that columns were deleted + columns = session.query(Column).all() + assert len(columns) == 0 + + +def test_cascade_delete_dataset(app_context: None, session: Session) -> None: + """ + Test that deleting ``Dataset`` also deletes its columns. 
+ """ + from superset.columns.models import Column + from superset.datasets.models import Dataset + from superset.models.core import Database + from superset.tables.models import Table + + engine = session.get_bind() + Dataset.metadata.create_all(engine) # pylint: disable=no-member + + table = Table( + name="my_table", + schema="my_schema", + catalog="my_catalog", + database=Database(database_name="my_database", sqlalchemy_uri="sqlite://"), + columns=[ + Column(name="longitude", expression="longitude"), + Column(name="latitude", expression="latitude"), + ], + ) + session.add(table) + session.flush() + + dataset = Dataset( + name="positions", + expression=""" +SELECT array_agg(array[longitude,latitude]) AS position +FROM my_catalog.my_schema.my_table +""", + tables=[table], + columns=[ + Column(name="position", expression="array_agg(array[longitude,latitude])",), + ], + ) + session.add(dataset) + session.flush() + + columns = session.query(Column).all() + assert len(columns) == 3 + + session.delete(dataset) + session.flush() + + # test that dataset columns were deleted (but not table columns) + columns = session.query(Column).all() + assert len(columns) == 2 + + +def test_dataset_attributes(app_context: None, session: Session) -> None: + """ + Test that checks attributes in the dataset. + + If this check fails it means new attributes were added to ``SqlaTable``, and + ``SqlaTable.after_insert`` should be updated to handle them! 
+ """ + from superset.connectors.sqla.models import SqlaTable, SqlMetric, TableColumn + from superset.models.core import Database + + engine = session.get_bind() + SqlaTable.metadata.create_all(engine) # pylint: disable=no-member + + columns = [ + TableColumn(column_name="ds", is_dttm=1, type="TIMESTAMP"), + TableColumn(column_name="user_id", type="INTEGER"), + TableColumn(column_name="revenue", type="INTEGER"), + TableColumn(column_name="expenses", type="INTEGER"), + TableColumn( + column_name="profit", type="INTEGER", expression="revenue-expenses" + ), + ] + metrics = [ + SqlMetric(metric_name="cnt", expression="COUNT(*)"), + ] + + sqla_table = SqlaTable( + table_name="old_dataset", + columns=columns, + metrics=metrics, + main_dttm_col="ds", + default_endpoint="https://www.youtube.com/watch?v=dQw4w9WgXcQ", # not used + database=Database(database_name="my_database", sqlalchemy_uri="sqlite://"), + offset=-8, + description="This is the description", + is_featured=1, + cache_timeout=3600, + schema="my_schema", + sql=None, + params=json.dumps( + {"remote_id": 64, "database_name": "examples", "import_time": 1606677834,} + ), + perm=None, + filter_select_enabled=1, + fetch_values_predicate="foo IN (1, 2)", + is_sqllab_view=0, # no longer used? + template_params=json.dumps({"answer": "42"}), + schema_perm=None, + extra=json.dumps({"warning_markdown": "*WARNING*"}), + ) + + session.add(sqla_table) + session.flush() + + dataset = session.query(SqlaTable).one() + # If this test fails because attributes changed, make sure to update + # ``SqlaTable.after_insert`` accordingly. 
+ assert sorted(dataset.__dict__.keys()) == [ + "_sa_instance_state", + "cache_timeout", + "changed_by_fk", + "changed_on", + "columns", + "created_by_fk", + "created_on", + "database", + "database_id", + "default_endpoint", + "description", + "external_url", + "extra", + "fetch_values_predicate", + "filter_select_enabled", + "id", + "is_featured", + "is_managed_externally", + "is_sqllab_view", + "main_dttm_col", + "metrics", + "offset", + "params", + "perm", + "schema", + "schema_perm", + "sql", + "table_name", + "template_params", + "uuid", + ] + + +def test_create_physical_sqlatable(app_context: None, session: Session) -> None: + """ + Test shadow write when creating a new ``SqlaTable``. + + When a new physical ``SqlaTable`` is created, new models should also be created for + ``Dataset``, ``Table``, and ``Column``. + """ + from superset.columns.models import Column + from superset.columns.schemas import ColumnSchema + from superset.connectors.sqla.models import SqlaTable, SqlMetric, TableColumn + from superset.datasets.models import Dataset + from superset.datasets.schemas import DatasetSchema + from superset.models.core import Database + from superset.tables.models import Table + from superset.tables.schemas import TableSchema + + engine = session.get_bind() + Dataset.metadata.create_all(engine) # pylint: disable=no-member + + columns = [ + TableColumn(column_name="ds", is_dttm=1, type="TIMESTAMP"), + TableColumn(column_name="user_id", type="INTEGER"), + TableColumn(column_name="revenue", type="INTEGER"), + TableColumn(column_name="expenses", type="INTEGER"), + TableColumn( + column_name="profit", type="INTEGER", expression="revenue-expenses" + ), + ] + metrics = [ + SqlMetric(metric_name="cnt", expression="COUNT(*)"), + ] + + sqla_table = SqlaTable( + table_name="old_dataset", + columns=columns, + metrics=metrics, + main_dttm_col="ds", + default_endpoint="https://www.youtube.com/watch?v=dQw4w9WgXcQ", # not used + database=Database(database_name="my_database", 
sqlalchemy_uri="sqlite://"), + offset=-8, + description="This is the description", + is_featured=1, + cache_timeout=3600, + schema="my_schema", + sql=None, + params=json.dumps( + {"remote_id": 64, "database_name": "examples", "import_time": 1606677834,} + ), + perm=None, + filter_select_enabled=1, + fetch_values_predicate="foo IN (1, 2)", + is_sqllab_view=0, # no longer used? + template_params=json.dumps({"answer": "42"}), + schema_perm=None, + extra=json.dumps({"warning_markdown": "*WARNING*"}), + ) + session.add(sqla_table) + session.flush() + + # ignore these keys when comparing results + ignored_keys = {"created_on", "changed_on", "uuid"} + + # check that columns were created + column_schema = ColumnSchema() + column_schemas = [ + {k: v for k, v in column_schema.dump(column).items() if k not in ignored_keys} + for column in session.query(Column).all() + ] + assert column_schemas == [ + { + "changed_by": None, + "created_by": None, + "description": None, + "expression": "ds", + "extra_json": "{}", + "id": 1, + "is_increase_desired": True, + "is_additive": False, + "is_aggregation": False, + "is_partition": False, + "is_physical": True, + "is_spatial": False, + "is_temporal": True, + "name": "ds", + "type": "TIMESTAMP", + "unit": None, + "warning_text": None, + "is_managed_externally": False, + "external_url": None, + }, + { + "changed_by": None, + "created_by": None, + "description": None, + "expression": "user_id", + "extra_json": "{}", + "id": 2, + "is_increase_desired": True, + "is_additive": False, + "is_aggregation": False, + "is_partition": False, + "is_physical": True, + "is_spatial": False, + "is_temporal": False, + "name": "user_id", + "type": "INTEGER", + "unit": None, + "warning_text": None, + "is_managed_externally": False, + "external_url": None, + }, + { + "changed_by": None, + "created_by": None, + "description": None, + "expression": "revenue", + "extra_json": "{}", + "id": 3, + "is_increase_desired": True, + "is_additive": False, + 
"is_aggregation": False, + "is_partition": False, + "is_physical": True, + "is_spatial": False, + "is_temporal": False, + "name": "revenue", + "type": "INTEGER", + "unit": None, + "warning_text": None, + "is_managed_externally": False, + "external_url": None, + }, + { + "changed_by": None, + "created_by": None, + "description": None, + "expression": "expenses", + "extra_json": "{}", + "id": 4, + "is_increase_desired": True, + "is_additive": False, + "is_aggregation": False, + "is_partition": False, + "is_physical": True, + "is_spatial": False, + "is_temporal": False, + "name": "expenses", + "type": "INTEGER", + "unit": None, + "warning_text": None, + "is_managed_externally": False, + "external_url": None, + }, + { + "changed_by": None, + "created_by": None, + "description": None, + "expression": "revenue-expenses", + "extra_json": "{}", + "id": 5, + "is_increase_desired": True, + "is_additive": False, + "is_aggregation": False, + "is_partition": False, + "is_physical": False, + "is_spatial": False, + "is_temporal": False, + "name": "profit", + "type": "INTEGER", + "unit": None, + "warning_text": None, + "is_managed_externally": False, + "external_url": None, + }, + { + "changed_by": None, + "created_by": None, + "description": None, + "expression": "COUNT(*)", + "extra_json": "{}", + "id": 6, + "is_increase_desired": True, + "is_additive": False, + "is_aggregation": True, + "is_partition": False, + "is_physical": False, + "is_spatial": False, + "is_temporal": False, + "name": "cnt", + "type": "Unknown", + "unit": None, + "warning_text": None, + "is_managed_externally": False, + "external_url": None, + }, + ] + + # check that table was created + table_schema = TableSchema() + tables = [ + {k: v for k, v in table_schema.dump(table).items() if k not in ignored_keys} + for table in session.query(Table).all() + ] + assert tables == [ + { + "extra_json": "{}", + "catalog": None, + "schema": "my_schema", + "name": "old_dataset", + "id": 1, + "database": 1, + "columns": 
[1, 2, 3, 4], + "created_by": None, + "changed_by": None, + "is_managed_externally": False, + "external_url": None, + } + ] + + # check that dataset was created + dataset_schema = DatasetSchema() + datasets = [ + {k: v for k, v in dataset_schema.dump(dataset).items() if k not in ignored_keys} + for dataset in session.query(Dataset).all() + ] + assert datasets == [ + { + "id": 1, + "sqlatable_id": 1, + "name": "old_dataset", + "changed_by": None, + "created_by": None, + "columns": [1, 2, 3, 4, 5, 6], + "is_physical": True, + "tables": [1], + "extra_json": "{}", + "expression": "old_dataset", + "is_managed_externally": False, + "external_url": None, + } + ] + + +def test_create_virtual_sqlatable( + mocker: MockFixture, app_context: None, session: Session +) -> None: + """ + Test shadow write when creating a new ``SqlaTable``. + + When a new virtual ``SqlaTable`` is created, new models should also be created for + ``Dataset`` and ``Column``. + """ + # patch session + mocker.patch( + "superset.security.SupersetSecurityManager.get_session", return_value=session + ) + + from superset.columns.models import Column + from superset.columns.schemas import ColumnSchema + from superset.connectors.sqla.models import SqlaTable, SqlMetric, TableColumn + from superset.datasets.models import Dataset + from superset.datasets.schemas import DatasetSchema + from superset.models.core import Database + from superset.tables.models import Table + + engine = session.get_bind() + Dataset.metadata.create_all(engine) # pylint: disable=no-member + + # create the ``Table`` that the virtual dataset points to + database = Database(database_name="my_database", sqlalchemy_uri="sqlite://") + table = Table( + name="some_table", + schema="my_schema", + catalog=None, + database=database, + columns=[ + Column(name="ds", is_temporal=True, type="TIMESTAMP"), + Column(name="user_id", type="INTEGER"), + Column(name="revenue", type="INTEGER"), + Column(name="expenses", type="INTEGER"), + ], + ) + 
session.add(table) + session.commit() + + # create virtual dataset + columns = [ + TableColumn(column_name="ds", is_dttm=1, type="TIMESTAMP"), + TableColumn(column_name="user_id", type="INTEGER"), + TableColumn(column_name="revenue", type="INTEGER"), + TableColumn(column_name="expenses", type="INTEGER"), + TableColumn( + column_name="profit", type="INTEGER", expression="revenue-expenses" + ), + ] + metrics = [ + SqlMetric(metric_name="cnt", expression="COUNT(*)"), + ] + + sqla_table = SqlaTable( + table_name="old_dataset", + columns=columns, + metrics=metrics, + main_dttm_col="ds", + default_endpoint="https://www.youtube.com/watch?v=dQw4w9WgXcQ", # not used + database=database, + offset=-8, + description="This is the description", + is_featured=1, + cache_timeout=3600, + schema="my_schema", + sql=""" +SELECT + ds, + user_id, + revenue, + expenses, + revenue - expenses AS profit +FROM + some_table""", + params=json.dumps( + {"remote_id": 64, "database_name": "examples", "import_time": 1606677834,} + ), + perm=None, + filter_select_enabled=1, + fetch_values_predicate="foo IN (1, 2)", + is_sqllab_view=0, # no longer used? 
+ template_params=json.dumps({"answer": "42"}), + schema_perm=None, + extra=json.dumps({"warning_markdown": "*WARNING*"}), + ) + session.add(sqla_table) + session.flush() + + # ignore these keys when comparing results + ignored_keys = {"created_on", "changed_on", "uuid"} + + # check that columns were created + column_schema = ColumnSchema() + column_schemas = [ + {k: v for k, v in column_schema.dump(column).items() if k not in ignored_keys} + for column in session.query(Column).all() + ] + assert column_schemas == [ + { + "type": "TIMESTAMP", + "is_additive": False, + "extra_json": "{}", + "is_partition": False, + "expression": None, + "unit": None, + "warning_text": None, + "created_by": None, + "is_increase_desired": True, + "description": None, + "is_spatial": False, + "name": "ds", + "is_physical": True, + "changed_by": None, + "is_temporal": True, + "id": 1, + "is_aggregation": False, + "external_url": None, + "is_managed_externally": False, + }, + { + "type": "INTEGER", + "is_additive": False, + "extra_json": "{}", + "is_partition": False, + "expression": None, + "unit": None, + "warning_text": None, + "created_by": None, + "is_increase_desired": True, + "description": None, + "is_spatial": False, + "name": "user_id", + "is_physical": True, + "changed_by": None, + "is_temporal": False, + "id": 2, + "is_aggregation": False, + "external_url": None, + "is_managed_externally": False, + }, + { + "type": "INTEGER", + "is_additive": False, + "extra_json": "{}", + "is_partition": False, + "expression": None, + "unit": None, + "warning_text": None, + "created_by": None, + "is_increase_desired": True, + "description": None, + "is_spatial": False, + "name": "revenue", + "is_physical": True, + "changed_by": None, + "is_temporal": False, + "id": 3, + "is_aggregation": False, + "external_url": None, + "is_managed_externally": False, + }, + { + "type": "INTEGER", + "is_additive": False, + "extra_json": "{}", + "is_partition": False, + "expression": None, + "unit": None, + 
"warning_text": None, + "created_by": None, + "is_increase_desired": True, + "description": None, + "is_spatial": False, + "name": "expenses", + "is_physical": True, + "changed_by": None, + "is_temporal": False, + "id": 4, + "is_aggregation": False, + "external_url": None, + "is_managed_externally": False, + }, + { + "type": "TIMESTAMP", + "is_additive": False, + "extra_json": "{}", + "is_partition": False, + "expression": "ds", + "unit": None, + "warning_text": None, + "created_by": None, + "is_increase_desired": True, + "description": None, + "is_spatial": False, + "name": "ds", + "is_physical": False, + "changed_by": None, + "is_temporal": True, + "id": 5, + "is_aggregation": False, + "external_url": None, + "is_managed_externally": False, + }, + { + "type": "INTEGER", + "is_additive": False, + "extra_json": "{}", + "is_partition": False, + "expression": "user_id", + "unit": None, + "warning_text": None, + "created_by": None, + "is_increase_desired": True, + "description": None, + "is_spatial": False, + "name": "user_id", + "is_physical": False, + "changed_by": None, + "is_temporal": False, + "id": 6, + "is_aggregation": False, + "external_url": None, + "is_managed_externally": False, + }, + { + "type": "INTEGER", + "is_additive": False, + "extra_json": "{}", + "is_partition": False, + "expression": "revenue", + "unit": None, + "warning_text": None, + "created_by": None, + "is_increase_desired": True, + "description": None, + "is_spatial": False, + "name": "revenue", + "is_physical": False, + "changed_by": None, + "is_temporal": False, + "id": 7, + "is_aggregation": False, + "external_url": None, + "is_managed_externally": False, + }, + { + "type": "INTEGER", + "is_additive": False, + "extra_json": "{}", + "is_partition": False, + "expression": "expenses", + "unit": None, + "warning_text": None, + "created_by": None, + "is_increase_desired": True, + "description": None, + "is_spatial": False, + "name": "expenses", + "is_physical": False, + "changed_by": None, + 
"is_temporal": False, + "id": 8, + "is_aggregation": False, + "external_url": None, + "is_managed_externally": False, + }, + { + "type": "INTEGER", + "is_additive": False, + "extra_json": "{}", + "is_partition": False, + "expression": "revenue-expenses", + "unit": None, + "warning_text": None, + "created_by": None, + "is_increase_desired": True, + "description": None, + "is_spatial": False, + "name": "profit", + "is_physical": False, + "changed_by": None, + "is_temporal": False, + "id": 9, + "is_aggregation": False, + "external_url": None, + "is_managed_externally": False, + }, + { + "type": "Unknown", + "is_additive": False, + "extra_json": "{}", + "is_partition": False, + "expression": "COUNT(*)", + "unit": None, + "warning_text": None, + "created_by": None, + "is_increase_desired": True, + "description": None, + "is_spatial": False, + "name": "cnt", + "is_physical": False, + "changed_by": None, + "is_temporal": False, + "id": 10, + "is_aggregation": True, + "external_url": None, + "is_managed_externally": False, + }, + ] + + # check that dataset was created, and has a reference to the table + dataset_schema = DatasetSchema() + datasets = [ + {k: v for k, v in dataset_schema.dump(dataset).items() if k not in ignored_keys} + for dataset in session.query(Dataset).all() + ] + assert datasets == [ + { + "id": 1, + "sqlatable_id": 1, + "name": "old_dataset", + "changed_by": None, + "created_by": None, + "columns": [5, 6, 7, 8, 9, 10], + "is_physical": False, + "tables": [1], + "extra_json": "{}", + "external_url": None, + "is_managed_externally": False, + "expression": """ +SELECT + ds, + user_id, + revenue, + expenses, + revenue - expenses AS profit +FROM + some_table""", + } + ] + + +def test_delete_sqlatable(app_context: None, session: Session) -> None: + """ + Test that deleting a ``SqlaTable`` also deletes the corresponding ``Dataset``. 
+ """ + from superset.columns.models import Column + from superset.connectors.sqla.models import SqlaTable, TableColumn + from superset.datasets.models import Dataset + from superset.models.core import Database + from superset.tables.models import Table + + engine = session.get_bind() + Dataset.metadata.create_all(engine) # pylint: disable=no-member + + columns = [ + TableColumn(column_name="ds", is_dttm=1, type="TIMESTAMP"), + ] + sqla_table = SqlaTable( + table_name="old_dataset", + columns=columns, + metrics=[], + database=Database(database_name="my_database", sqlalchemy_uri="sqlite://"), + ) + session.add(sqla_table) + session.flush() + + datasets = session.query(Dataset).all() + assert len(datasets) == 1 + + session.delete(sqla_table) + session.flush() + + # test that dataset was also deleted + datasets = session.query(Dataset).all() + assert len(datasets) == 0 + + +def test_update_sqlatable( + mocker: MockFixture, app_context: None, session: Session +) -> None: + """ + Test that updating a ``SqlaTable`` also updates the corresponding ``Dataset``. 
+ """ + # patch session + mocker.patch( + "superset.security.SupersetSecurityManager.get_session", return_value=session + ) + + from superset.columns.models import Column + from superset.connectors.sqla.models import SqlaTable, TableColumn + from superset.datasets.models import Dataset + from superset.models.core import Database + from superset.tables.models import Table + + engine = session.get_bind() + Dataset.metadata.create_all(engine) # pylint: disable=no-member + + columns = [ + TableColumn(column_name="ds", is_dttm=1, type="TIMESTAMP"), + ] + sqla_table = SqlaTable( + table_name="old_dataset", + columns=columns, + metrics=[], + database=Database(database_name="my_database", sqlalchemy_uri="sqlite://"), + ) + session.add(sqla_table) + session.flush() + + dataset = session.query(Dataset).one() + assert len(dataset.columns) == 1 + + # add a column to the original ``SqlaTable`` instance + sqla_table.columns.append(TableColumn(column_name="user_id", type="INTEGER")) + session.flush() + + # check that the column was added to the dataset + dataset = session.query(Dataset).one() + assert len(dataset.columns) == 2 + + # delete the column in the original instance + sqla_table.columns = sqla_table.columns[1:] + session.flush() + + # check that the column was also removed from the dataset + dataset = session.query(Dataset).one() + assert len(dataset.columns) == 1 + + # modify the attribute in a column + sqla_table.columns[0].is_dttm = True + session.flush() + + # check that the dataset column was modified + dataset = session.query(Dataset).one() + assert dataset.columns[0].is_temporal is True + + +def test_update_sqlatable_schema( + mocker: MockFixture, app_context: None, session: Session +) -> None: + """ + Test that updating a ``SqlaTable`` schema also updates the corresponding ``Dataset``. 
+ """ + # patch session + mocker.patch( + "superset.security.SupersetSecurityManager.get_session", return_value=session + ) + mocker.patch("superset.datasets.dao.db.session", session) + + from superset.columns.models import Column + from superset.connectors.sqla.models import SqlaTable, TableColumn + from superset.datasets.models import Dataset + from superset.models.core import Database + from superset.tables.models import Table + + engine = session.get_bind() + Dataset.metadata.create_all(engine) # pylint: disable=no-member + + columns = [ + TableColumn(column_name="ds", is_dttm=1, type="TIMESTAMP"), + ] + sqla_table = SqlaTable( + table_name="old_dataset", + schema="old_schema", + columns=columns, + metrics=[], + database=Database(database_name="my_database", sqlalchemy_uri="sqlite://"), + ) + session.add(sqla_table) + session.flush() + + dataset = session.query(Dataset).one() + assert dataset.tables[0].schema == "old_schema" + assert dataset.tables[0].id == 1 + + sqla_table.schema = "new_schema" + session.flush() + + dataset = session.query(Dataset).one() + assert dataset.tables[0].schema == "new_schema" + assert dataset.tables[0].id == 2 + + +def test_update_sqlatable_metric( + mocker: MockFixture, app_context: None, session: Session +) -> None: + """ + Test that updating a ``SqlaTable`` also updates the corresponding ``Dataset``. + + For this test we check that updating the SQL expression in a metric belonging to a + ``SqlaTable`` is reflected in the ``Dataset`` metric. 
+ """ + # patch session + mocker.patch( + "superset.security.SupersetSecurityManager.get_session", return_value=session + ) + + from superset.columns.models import Column + from superset.connectors.sqla.models import SqlaTable, SqlMetric, TableColumn + from superset.datasets.models import Dataset + from superset.models.core import Database + from superset.tables.models import Table + + engine = session.get_bind() + Dataset.metadata.create_all(engine) # pylint: disable=no-member + + columns = [ + TableColumn(column_name="ds", is_dttm=1, type="TIMESTAMP"), + ] + metrics = [ + SqlMetric(metric_name="cnt", expression="COUNT(*)"), + ] + sqla_table = SqlaTable( + table_name="old_dataset", + columns=columns, + metrics=metrics, + database=Database(database_name="my_database", sqlalchemy_uri="sqlite://"), + ) + session.add(sqla_table) + session.flush() + + # check that the metric was created + column = session.query(Column).filter_by(is_physical=False).one() + assert column.expression == "COUNT(*)" + + # change the metric definition + sqla_table.metrics[0].expression = "MAX(ds)" + session.flush() + + assert column.expression == "MAX(ds)" + + +def test_update_virtual_sqlatable_references( + mocker: MockFixture, app_context: None, session: Session +) -> None: + """ + Test that changing the SQL of a virtual ``SqlaTable`` updates ``Dataset``. + + When the SQL is modified the list of referenced tables should be updated in the new + ``Dataset`` model. 
+ """ + # patch session + mocker.patch( + "superset.security.SupersetSecurityManager.get_session", return_value=session + ) + + from superset.columns.models import Column + from superset.connectors.sqla.models import SqlaTable, TableColumn + from superset.datasets.models import Dataset + from superset.models.core import Database + from superset.tables.models import Table + + engine = session.get_bind() + Dataset.metadata.create_all(engine) # pylint: disable=no-member + + database = Database(database_name="my_database", sqlalchemy_uri="sqlite://") + table1 = Table( + name="table_a", + schema="my_schema", + catalog=None, + database=database, + columns=[Column(name="a", type="INTEGER")], + ) + table2 = Table( + name="table_b", + schema="my_schema", + catalog=None, + database=database, + columns=[Column(name="b", type="INTEGER")], + ) + session.add(table1) + session.add(table2) + session.commit() + + # create virtual dataset + columns = [TableColumn(column_name="a", type="INTEGER")] + + sqla_table = SqlaTable( + table_name="old_dataset", + columns=columns, + database=database, + schema="my_schema", + sql="SELECT a FROM table_a", + ) + session.add(sqla_table) + session.flush() + + # check that new dataset has table1 + dataset = session.query(Dataset).one() + assert dataset.tables == [table1] + + # change SQL + sqla_table.sql = "SELECT a, b FROM table_a JOIN table_b" + session.flush() + + # check that new dataset has both tables + dataset = session.query(Dataset).one() + assert dataset.tables == [table1, table2] + assert dataset.expression == "SELECT a, b FROM table_a JOIN table_b" + + +def test_quote_expressions(app_context: None, session: Session) -> None: + """ + Test that expressions are quoted appropriately in columns and datasets. 
+ """ + from superset.columns.models import Column + from superset.connectors.sqla.models import SqlaTable, TableColumn + from superset.datasets.models import Dataset + from superset.models.core import Database + from superset.tables.models import Table + + engine = session.get_bind() + Dataset.metadata.create_all(engine) # pylint: disable=no-member + + columns = [ + TableColumn(column_name="has space", type="INTEGER"), + TableColumn(column_name="no_need", type="INTEGER"), + ] + + sqla_table = SqlaTable( + table_name="old dataset", + columns=columns, + metrics=[], + database=Database(database_name="my_database", sqlalchemy_uri="sqlite://"), + ) + session.add(sqla_table) + session.flush() + + dataset = session.query(Dataset).one() + assert dataset.expression == '"old dataset"' + assert dataset.columns[0].expression == '"has space"' + assert dataset.columns[1].expression == "no_need" + + +def test_update_physical_sqlatable( + mocker: MockFixture, app_context: None, session: Session +) -> None: + """ + Test updating the table on a physical dataset. + + When updating the table on a physical dataset by pointing it somewhere else (change + in database ID, schema, or table name) we should point the ``Dataset`` to an + existing ``Table`` if possible, and create a new one otherwise. 
+ """ + # patch session + mocker.patch( + "superset.security.SupersetSecurityManager.get_session", return_value=session + ) + mocker.patch("superset.datasets.dao.db.session", session) + + from superset.columns.models import Column + from superset.connectors.sqla.models import SqlaTable, TableColumn + from superset.datasets.models import Dataset + from superset.models.core import Database + from superset.tables.models import Table + from superset.tables.schemas import TableSchema + + engine = session.get_bind() + Dataset.metadata.create_all(engine) # pylint: disable=no-member + + columns = [ + TableColumn(column_name="a", type="INTEGER"), + ] + + sqla_table = SqlaTable( + table_name="old_dataset", + columns=columns, + metrics=[], + database=Database(database_name="my_database", sqlalchemy_uri="sqlite://"), + ) + session.add(sqla_table) + session.flush() + + # check that the table was created, and that the created dataset points to it + table = session.query(Table).one() + assert table.id == 1 + assert table.name == "old_dataset" + assert table.schema is None + assert table.database_id == 1 + + dataset = session.query(Dataset).one() + assert dataset.tables == [table] + + # point ``SqlaTable`` to a different database + new_database = Database( + database_name="my_other_database", sqlalchemy_uri="sqlite://" + ) + session.add(new_database) + session.flush() + sqla_table.database = new_database + session.flush() + + # ignore these keys when comparing results + ignored_keys = {"created_on", "changed_on", "uuid"} + + # check that the old table still exists, and that the dataset points to the newly + # created table (id=2) and column (id=2), on the new database (also id=2) + table_schema = TableSchema() + tables = [ + {k: v for k, v in table_schema.dump(table).items() if k not in ignored_keys} + for table in session.query(Table).all() + ] + assert tables == [ + { + "created_by": None, + "extra_json": "{}", + "name": "old_dataset", + "changed_by": None, + "catalog": None, + 
"columns": [1], + "database": 1, + "external_url": None, + "schema": None, + "id": 1, + "is_managed_externally": False, + }, + { + "created_by": None, + "extra_json": "{}", + "name": "old_dataset", + "changed_by": None, + "catalog": None, + "columns": [2], + "database": 2, + "external_url": None, + "schema": None, + "id": 2, + "is_managed_externally": False, + }, + ] + + # check that dataset now points to the new table + assert dataset.tables[0].database_id == 2 + + # point ``SqlaTable`` back + sqla_table.database_id = 1 + session.flush() + + # check that dataset points to the original table + assert dataset.tables[0].database_id == 1 diff --git a/tests/unit_tests/tables/__init__.py b/tests/unit_tests/tables/__init__.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/tests/unit_tests/tables/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. diff --git a/tests/unit_tests/tables/test_models.py b/tests/unit_tests/tables/test_models.py new file mode 100644 index 0000000000000..eb1f5f4611248 --- /dev/null +++ b/tests/unit_tests/tables/test_models.py @@ -0,0 +1,50 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. 
See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +# pylint: disable=import-outside-toplevel, unused-argument + +from sqlalchemy.orm.session import Session + + +def test_table_model(app_context: None, session: Session) -> None: + """ + Test basic attributes of a ``Table``. + """ + from superset.columns.models import Column + from superset.models.core import Database + from superset.tables.models import Table + + engine = session.get_bind() + Table.metadata.create_all(engine) # pylint: disable=no-member + + table = Table( + name="my_table", + schema="my_schema", + catalog="my_catalog", + database=Database(database_name="my_database", sqlalchemy_uri="test://"), + columns=[Column(name="ds", type="TIMESTAMP", expression="ds",)], + ) + session.add(table) + session.flush() + + assert table.id == 1 + assert table.uuid is not None + assert table.database_id == 1 + assert table.catalog == "my_catalog" + assert table.schema == "my_schema" + assert table.name == "my_table" + assert [column.name for column in table.columns] == ["ds"] From 822dd6de5db814f1c2b22799777c7b3cb2a358e6 Mon Sep 17 00:00:00 2001 From: Beto Dealmeida Date: Thu, 24 Feb 2022 11:34:34 -0800 Subject: [PATCH 07/37] chore: update UPDATING re: SIP-68 (#18935) --- UPDATING.md | 1 + 1 file changed, 1 insertion(+) diff --git a/UPDATING.md b/UPDATING.md index 
d07cad3be5f12..667a47ca991f0 100644 --- a/UPDATING.md +++ b/UPDATING.md @@ -38,6 +38,7 @@ assists people when migrating to a new version. (init, load_examples and etc) require setting the FLASK_APP environment variable (which is set by default when .flaskenv is loaded) - [17360](https://github.com/apache/superset/pull/17360): changes the column type from `VARCHAR(32)` to `TEXT` in table `table_columns`, potentially requiring a table lock on MySQL dbs or taking some time to complete on large deployments. +- [17543](https://github.com/apache/superset/pull/17543): introduces new models from SIP-68. The database migration migrates the old models (`SqlaTable`, `TableColumn`, `SqlMetric`) to the new models (`Column`, `Table`, `Dataset`), and the PR introduces logic to keep the old models in sync with the new ones until they are fully removed. The migration might take considerable time depending on the number of datasets. ### Deprecations From 209e3f45548ae8d0b3ac63d2c393883b733d2b22 Mon Sep 17 00:00:00 2001 From: "Hugh A. Miles II" Date: Thu, 24 Feb 2022 13:00:44 -0800 Subject: [PATCH 08/37] =?UTF-8?q?feat:=20DBC-UI=20Globally=20available=20a?= =?UTF-8?q?cross=20the=20app=20=F0=9F=8C=8E=20(#18722)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * more data nav menu * fix lint and fix nav css * update test and remove icons * Update superset-frontend/src/views/components/Menu.test.tsx Co-authored-by: Hugh A. 
Miles II * Apply suggestions from code review * use backend app.link to show new nav changes * fix lint * update test * usetheme and remove chaining * add more suggestions * fix lint * working global db connection * add allowed extensions to bootstrap and hard code links * remove backend links * fix test * apply stashed gsheets * fix check for google sheets * setup gsheets * add extensions to frontend conf * fix test and add be changes * remove package json changes * test is python test passes * update python test and reremove app links * fix tslint issues * fix other linting tools * fix pylint * fix test * fix * refactor * fix lint * working fixed test * clean up test * address concerns * address concerns * change to tenarary Co-authored-by: Phillip Kelley-Dotson --- .../src/views/components/Menu.test.tsx | 99 +++++++-- .../src/views/components/Menu.tsx | 2 +- .../src/views/components/MenuRight.tsx | 189 ++++++++++-------- .../src/views/components/types.ts | 38 ++++ superset/views/base.py | 6 + 5 files changed, 232 insertions(+), 102 deletions(-) create mode 100644 superset-frontend/src/views/components/types.ts diff --git a/superset-frontend/src/views/components/Menu.test.tsx b/superset-frontend/src/views/components/Menu.test.tsx index b990ddc1804a9..d13275fbc0b57 100644 --- a/superset-frontend/src/views/components/Menu.test.tsx +++ b/superset-frontend/src/views/components/Menu.test.tsx @@ -21,7 +21,64 @@ import * as reactRedux from 'react-redux'; import { render, screen } from 'spec/helpers/testing-library'; import userEvent from '@testing-library/user-event'; import { Menu } from './Menu'; -import { dropdownItems } from './MenuRight'; + +const dropdownItems = [ + { + label: 'Data', + icon: 'fa-database', + childs: [ + { + label: 'Connect Database', + name: 'dbconnect', + perm: true, + }, + { + label: 'Connect Google Sheet', + name: 'gsheets', + perm: true, + }, + { + label: 'Upload a CSV', + name: 'Upload a CSV', + url: '/csvtodatabaseview/form', + perm: true, 
+ }, + { + label: 'Upload a Columnar File', + name: 'Upload a Columnar file', + url: '/columnartodatabaseview/form', + perm: true, + }, + { + label: 'Upload Excel', + name: 'Upload Excel', + url: '/exceltodatabaseview/form', + perm: true, + }, + ], + }, + { + label: 'SQL query', + url: '/superset/sqllab?new=true', + icon: 'fa-fw fa-search', + perm: 'can_sqllab', + view: 'Superset', + }, + { + label: 'Chart', + url: '/chart/add', + icon: 'fa-fw fa-bar-chart', + perm: 'can_write', + view: 'Chart', + }, + { + label: 'Dashboard', + url: '/dashboard/new', + icon: 'fa-fw fa-dashboard', + perm: 'can_write', + view: 'Dashboard', + }, +]; const user = { createdOn: '2021-04-27T18:12:38.952304', @@ -185,13 +242,13 @@ beforeEach(() => { test('should render', () => { useSelectorMock.mockReturnValue({ roles: user.roles }); - const { container } = render(); + const { container } = render(, { useRedux: true }); expect(container).toBeInTheDocument(); }); test('should render the navigation', () => { useSelectorMock.mockReturnValue({ roles: user.roles }); - render(); + render(, { useRedux: true }); expect(screen.getByRole('navigation')).toBeInTheDocument(); }); @@ -202,7 +259,7 @@ test('should render the brand', () => { brand: { alt, icon }, }, } = mockedProps; - render(); + render(, { useRedux: true }); const image = screen.getByAltText(alt); expect(image).toHaveAttribute('src', icon); }); @@ -212,7 +269,7 @@ test('should render all the top navbar menu items', () => { const { data: { menu }, } = mockedProps; - render(); + render(, { useRedux: true }); menu.forEach(item => { expect(screen.getByText(item.label)).toBeInTheDocument(); }); @@ -223,7 +280,7 @@ test('should render the top navbar child menu items', async () => { const { data: { menu }, } = mockedProps; - render(); + render(, { useRedux: true }); const sources = screen.getByText('Sources'); userEvent.hover(sources); const datasets = await screen.findByText('Datasets'); @@ -237,7 +294,7 @@ test('should render the top navbar 
child menu items', async () => { test('should render the dropdown items', async () => { useSelectorMock.mockReturnValue({ roles: user.roles }); - render(); + render(, { useRedux: true }); const dropdown = screen.getByTestId('new-dropdown-icon'); userEvent.hover(dropdown); // todo (philip): test data submenu @@ -263,14 +320,14 @@ test('should render the dropdown items', async () => { test('should render the Settings', async () => { useSelectorMock.mockReturnValue({ roles: user.roles }); - render(); + render(, { useRedux: true }); const settings = await screen.findByText('Settings'); expect(settings).toBeInTheDocument(); }); test('should render the Settings menu item', async () => { useSelectorMock.mockReturnValue({ roles: user.roles }); - render(); + render(, { useRedux: true }); userEvent.hover(screen.getByText('Settings')); const label = await screen.findByText('Security'); expect(label).toBeInTheDocument(); @@ -281,7 +338,7 @@ test('should render the Settings dropdown child menu items', async () => { const { data: { settings }, } = mockedProps; - render(); + render(, { useRedux: true }); userEvent.hover(screen.getByText('Settings')); const listUsers = await screen.findByText('List Users'); expect(listUsers).toHaveAttribute('href', settings[0].childs[0].url); @@ -289,13 +346,13 @@ test('should render the Settings dropdown child menu items', async () => { test('should render the plus menu (+) when user is not anonymous', () => { useSelectorMock.mockReturnValue({ roles: user.roles }); - render(); + render(, { useRedux: true }); expect(screen.getByTestId('new-dropdown')).toBeInTheDocument(); }); test('should NOT render the plus menu (+) when user is anonymous', () => { useSelectorMock.mockReturnValue({ roles: user.roles }); - render(); + render(, { useRedux: true }); expect(screen.queryByTestId('new-dropdown')).not.toBeInTheDocument(); }); @@ -307,7 +364,7 @@ test('should render the user actions when user is not anonymous', async () => { }, } = mockedProps; - 
render(); + render(, { useRedux: true }); userEvent.hover(screen.getByText('Settings')); const user = await screen.findByText('User'); expect(user).toBeInTheDocument(); @@ -321,7 +378,7 @@ test('should render the user actions when user is not anonymous', async () => { test('should NOT render the user actions when user is anonymous', () => { useSelectorMock.mockReturnValue({ roles: user.roles }); - render(); + render(, { useRedux: true }); expect(screen.queryByText('User')).not.toBeInTheDocument(); }); @@ -333,7 +390,7 @@ test('should render the Profile link when available', async () => { }, } = mockedProps; - render(); + render(, { useRedux: true }); userEvent.hover(screen.getByText('Settings')); const profile = await screen.findByText('Profile'); @@ -348,7 +405,7 @@ test('should render the About section and version_string, sha or build_number wh }, } = mockedProps; - render(); + render(, { useRedux: true }); userEvent.hover(screen.getByText('Settings')); const about = await screen.findByText('About'); const version = await screen.findByText(`Version: ${version_string}`); @@ -367,7 +424,7 @@ test('should render the Documentation link when available', async () => { navbar_right: { documentation_url }, }, } = mockedProps; - render(); + render(, { useRedux: true }); userEvent.hover(screen.getByText('Settings')); const doc = await screen.findByTitle('Documentation'); expect(doc).toHaveAttribute('href', documentation_url); @@ -381,7 +438,7 @@ test('should render the Bug Report link when available', async () => { }, } = mockedProps; - render(); + render(, { useRedux: true }); const bugReport = await screen.findByTitle('Report a bug'); expect(bugReport).toHaveAttribute('href', bug_report_url); }); @@ -394,19 +451,19 @@ test('should render the Login link when user is anonymous', () => { }, } = mockedProps; - render(); + render(, { useRedux: true }); const login = screen.getByText('Login'); expect(login).toHaveAttribute('href', user_login_url); }); test('should render the 
Language Picker', () => { useSelectorMock.mockReturnValue({ roles: user.roles }); - render(); + render(, { useRedux: true }); expect(screen.getByLabelText('Languages')).toBeInTheDocument(); }); test('should hide create button without proper roles', () => { useSelectorMock.mockReturnValue({ roles: [] }); - render(); + render(, { useRedux: true }); expect(screen.queryByTestId('new-dropdown')).not.toBeInTheDocument(); }); diff --git a/superset-frontend/src/views/components/Menu.tsx b/superset-frontend/src/views/components/Menu.tsx index cc98130d4d400..5231d27f21256 100644 --- a/superset-frontend/src/views/components/Menu.tsx +++ b/superset-frontend/src/views/components/Menu.tsx @@ -79,7 +79,7 @@ interface MenuObjectChildProps { index?: number; url?: string; isFrontendRoute?: boolean; - perm?: string; + perm?: string | boolean; view?: string; } diff --git a/superset-frontend/src/views/components/MenuRight.tsx b/superset-frontend/src/views/components/MenuRight.tsx index db7e2fae63897..8222a978c008b 100644 --- a/superset-frontend/src/views/components/MenuRight.tsx +++ b/superset-frontend/src/views/components/MenuRight.tsx @@ -16,67 +16,22 @@ * specific language governing permissions and limitations * under the License. 
*/ -import React from 'react'; +import React, { useState } from 'react'; import { MainNav as Menu } from 'src/common/components'; import { t, styled, css, SupersetTheme } from '@superset-ui/core'; import { Link } from 'react-router-dom'; import Icons from 'src/components/Icons'; import findPermission from 'src/dashboard/util/findPermission'; import { useSelector } from 'react-redux'; -import { - UserWithPermissionsAndRoles, - CommonBootstrapData, -} from 'src/types/bootstrapTypes'; +import { UserWithPermissionsAndRoles } from 'src/types/bootstrapTypes'; import LanguagePicker from './LanguagePicker'; -import { NavBarProps, MenuObjectProps } from './Menu'; - -export const dropdownItems: MenuObjectProps[] = [ - { - label: t('Data'), - icon: 'fa-database', - childs: [ - { - icon: 'fa-upload', - label: t('Upload a CSV'), - name: 'Upload a CSV', - url: '/csvtodatabaseview/form', - }, - { - icon: 'fa-upload', - label: t('Upload a Columnar File'), - name: 'Upload a Columnar file', - url: '/columnartodatabaseview/form', - }, - { - icon: 'fa-upload', - label: t('Upload Excel'), - name: 'Upload Excel', - url: '/exceltodatabaseview/form', - }, - ], - }, - { - label: t('SQL query'), - url: '/superset/sqllab?new=true', - icon: 'fa-fw fa-search', - perm: 'can_sqllab', - view: 'Superset', - }, - { - label: t('Chart'), - url: '/chart/add', - icon: 'fa-fw fa-bar-chart', - perm: 'can_write', - view: 'Chart', - }, - { - label: t('Dashboard'), - url: '/dashboard/new', - icon: 'fa-fw fa-dashboard', - perm: 'can_write', - view: 'Dashboard', - }, -]; +import DatabaseModal from '../CRUD/data/database/DatabaseModal'; +import { + ExtentionConfigs, + GlobalMenuDataOptions, + RightMenuProps, +} from './types'; +import { MenuObjectProps } from './Menu'; const versionInfoStyles = (theme: SupersetTheme) => css` padding: ${theme.gridUnit * 1.5}px ${theme.gridUnit * 4}px @@ -107,13 +62,6 @@ const StyledAnchor = styled.a` const { SubMenu } = Menu; -interface RightMenuProps { - align: 'flex-start' | 
'flex-end'; - settings: MenuObjectProps[]; - navbarRight: NavBarProps; - isFrontendRoute: (path?: string) => boolean; -} - const RightMenu = ({ align, settings, @@ -123,30 +71,106 @@ const RightMenu = ({ const { roles } = useSelector( state => state.user, ); - // @ts-ignore - const { CSV_EXTENSIONS, COLUMNAR_EXTENSIONS, EXCEL_EXTENSIONS } = useSelector< - any, - CommonBootstrapData - >(state => state.common.conf); - // if user has any of these roles the dropdown will appear - const configMap = { - 'Upload a CSV': CSV_EXTENSIONS, - 'Upload a Columnar file': COLUMNAR_EXTENSIONS, - 'Upload Excel': EXCEL_EXTENSIONS, - }; + const { + CSV_EXTENSIONS, + COLUMNAR_EXTENSIONS, + EXCEL_EXTENSIONS, + HAS_GSHEETS_INSTALLED, + } = useSelector(state => state.common.conf); + + const [showModal, setShowModal] = useState(false); + const [engine, setEngine] = useState(''); const canSql = findPermission('can_sqllab', 'Superset', roles); const canDashboard = findPermission('can_write', 'Dashboard', roles); const canChart = findPermission('can_write', 'Chart', roles); const showActionDropdown = canSql || canChart || canDashboard; + const dropdownItems: MenuObjectProps[] = [ + { + label: t('Data'), + icon: 'fa-database', + childs: [ + { + label: t('Connect Database'), + name: GlobalMenuDataOptions.DB_CONNECTION, + perm: true, + }, + { + label: t('Connect Google Sheet'), + name: GlobalMenuDataOptions.GOOGLE_SHEETS, + perm: HAS_GSHEETS_INSTALLED, + }, + { + label: t('Upload a CSV'), + name: 'Upload a CSV', + url: '/csvtodatabaseview/form', + perm: CSV_EXTENSIONS, + }, + { + label: t('Upload a Columnar File'), + name: 'Upload a Columnar file', + url: '/columnartodatabaseview/form', + perm: COLUMNAR_EXTENSIONS, + }, + { + label: t('Upload Excel'), + name: 'Upload Excel', + url: '/exceltodatabaseview/form', + perm: EXCEL_EXTENSIONS, + }, + ], + }, + { + label: t('SQL query'), + url: '/superset/sqllab?new=true', + icon: 'fa-fw fa-search', + perm: 'can_sqllab', + view: 'Superset', + }, + { + 
label: t('Chart'), + url: '/chart/add', + icon: 'fa-fw fa-bar-chart', + perm: 'can_write', + view: 'Chart', + }, + { + label: t('Dashboard'), + url: '/dashboard/new', + icon: 'fa-fw fa-dashboard', + perm: 'can_write', + view: 'Dashboard', + }, + ]; + const menuIconAndLabel = (menu: MenuObjectProps) => ( <> {menu.label} ); + + const handleMenuSelection = (itemChose: any) => { + if (itemChose.key === GlobalMenuDataOptions.DB_CONNECTION) { + setShowModal(true); + } else if (itemChose.key === GlobalMenuDataOptions.GOOGLE_SHEETS) { + setShowModal(true); + setEngine('Google Sheets'); + } + }; + + const handleOnHideModal = () => { + setEngine(''); + setShowModal(false); + }; + return ( - + + {!navbarRight.user_is_anonymous && showActionDropdown && ( - {menu.childs.map(item => - typeof item !== 'string' && - item.name && - configMap[item.name] === true ? ( - - {item.label} - + {menu.childs.map((item, idx) => + typeof item !== 'string' && item.name && item.perm ? ( + <> + {idx === 2 && } + + {item.url ? ( + {item.label} + ) : ( + item.label + )} + + ) : null, )} diff --git a/superset-frontend/src/views/components/types.ts b/superset-frontend/src/views/components/types.ts new file mode 100644 index 0000000000000..0eff33390a727 --- /dev/null +++ b/superset-frontend/src/views/components/types.ts @@ -0,0 +1,38 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +import { NavBarProps, MenuObjectProps } from './Menu'; + +export interface ExtentionConfigs { + CSV_EXTENSIONS: boolean; + COLUMNAR_EXTENSIONS: boolean; + EXCEL_EXTENSIONS: boolean; + HAS_GSHEETS_INSTALLED: boolean; +} +export interface RightMenuProps { + align: 'flex-start' | 'flex-end'; + settings: MenuObjectProps[]; + navbarRight: NavBarProps; + isFrontendRoute: (path?: string) => boolean; +} + +export enum GlobalMenuDataOptions { + GOOGLE_SHEETS = 'gsheets', + DB_CONNECTION = 'dbconnection', +} diff --git a/superset/views/base.py b/superset/views/base.py index 30acd51c8b6c0..067e8d74a727a 100644 --- a/superset/views/base.py +++ b/superset/views/base.py @@ -62,6 +62,8 @@ from superset.commands.exceptions import CommandException, CommandInvalidError from superset.connectors.sqla import models from superset.datasets.commands.exceptions import get_dataset_exist_error_msg +from superset.db_engine_specs import get_available_engine_specs +from superset.db_engine_specs.gsheets import GSheetsEngineSpec from superset.errors import ErrorLevel, SupersetError, SupersetErrorType from superset.exceptions import ( SupersetErrorException, @@ -366,6 +368,10 @@ def common_bootstrap_payload() -> Dict[str, Any]: ReportRecipientType.EMAIL, ] + # verify client has google sheets installed + available_specs = get_available_engine_specs() + frontend_config["HAS_GSHEETS_INSTALLED"] = bool(available_specs[GSheetsEngineSpec]) + bootstrap_data = { "flash_messages": messages, "conf": frontend_config, From 420a63f3436fb01084720aa9fdc863c1766a3262 Mon Sep 17 00:00:00 
2001 From: Erik Ritter Date: Thu, 24 Feb 2022 13:01:02 -0800 Subject: [PATCH 09/37] fix(sqllab): Un-render deselected tabs (#18934) --- .../src/SqlLab/components/TabbedSqlEditors/index.jsx | 1 + 1 file changed, 1 insertion(+) diff --git a/superset-frontend/src/SqlLab/components/TabbedSqlEditors/index.jsx b/superset-frontend/src/SqlLab/components/TabbedSqlEditors/index.jsx index ea400b8b91c73..252051b69d63e 100644 --- a/superset-frontend/src/SqlLab/components/TabbedSqlEditors/index.jsx +++ b/superset-frontend/src/SqlLab/components/TabbedSqlEditors/index.jsx @@ -403,6 +403,7 @@ class TabbedSqlEditors extends React.PureComponent { return ( Date: Thu, 24 Feb 2022 17:42:55 -0800 Subject: [PATCH 10/37] chore: add intersect method from backend to fontend for upload extensions (#18811) * chore: add intersect method from be to font end * fix lint * add suggestion * fix python test * run precommit * fix pytlint * update changes from masters --- .../src/views/CRUD/utils.test.tsx | 17 +++++++++++++++++ superset-frontend/src/views/CRUD/utils.tsx | 12 ++++++++++++ .../src/views/components/Menu.tsx | 2 +- .../src/views/components/MenuRight.tsx | 6 +++++- .../src/views/components/types.ts | 7 ++++--- superset/views/base.py | 18 ++++++++---------- 6 files changed, 47 insertions(+), 15 deletions(-) diff --git a/superset-frontend/src/views/CRUD/utils.test.tsx b/superset-frontend/src/views/CRUD/utils.test.tsx index ce7139e82a99a..68d377e3102cf 100644 --- a/superset-frontend/src/views/CRUD/utils.test.tsx +++ b/superset-frontend/src/views/CRUD/utils.test.tsx @@ -23,6 +23,7 @@ import { getPasswordsNeeded, getAlreadyExists, hasTerminalValidation, + checkUploadExtensions, } from 'src/views/CRUD/utils'; const terminalErrors = { @@ -186,3 +187,19 @@ test('successfully modified rison to encode correctly', () => { expect(rison.decode(actualEncoding)).toEqual(testObject); }); }); + +test('checkUploadExtenssions should return valid upload extensions', () => { + const uploadExtensionTest = ['a', 
'b', 'c']; + const randomExtension = ['a', 'c']; + const randomExtensionTwo = ['c']; + const randomExtensionThree: Array = []; + expect( + checkUploadExtensions(randomExtension, uploadExtensionTest), + ).toBeTruthy(); + expect( + checkUploadExtensions(randomExtensionTwo, uploadExtensionTest), + ).toBeTruthy(); + expect( + checkUploadExtensions(randomExtensionThree, uploadExtensionTest), + ).toBeFalsy(); +}); diff --git a/superset-frontend/src/views/CRUD/utils.tsx b/superset-frontend/src/views/CRUD/utils.tsx index 174e1aa493108..d9d21c8565d17 100644 --- a/superset-frontend/src/views/CRUD/utils.tsx +++ b/superset-frontend/src/views/CRUD/utils.tsx @@ -27,6 +27,7 @@ import { css, } from '@superset-ui/core'; import Chart from 'src/types/Chart'; +import { intersection } from 'lodash'; import rison from 'rison'; import { getClientErrorObject } from 'src/utils/getClientErrorObject'; import { FetchDataConfig } from 'src/components/ListView'; @@ -409,3 +410,14 @@ export const hasTerminalValidation = (errors: Record[]) => isNeedsPassword(payload) || isAlreadyExists(payload), ), ); + +export const checkUploadExtensions = ( + perm: Array | string | undefined | boolean, + cons: Array, +) => { + if (perm !== undefined) { + if (typeof perm === 'boolean') return perm; + return intersection(perm, cons).length; + } + return false; +}; diff --git a/superset-frontend/src/views/components/Menu.tsx b/superset-frontend/src/views/components/Menu.tsx index 5231d27f21256..313c4b5cac071 100644 --- a/superset-frontend/src/views/components/Menu.tsx +++ b/superset-frontend/src/views/components/Menu.tsx @@ -79,7 +79,7 @@ interface MenuObjectChildProps { index?: number; url?: string; isFrontendRoute?: boolean; - perm?: string | boolean; + perm?: string | Array | boolean; view?: string; } diff --git a/superset-frontend/src/views/components/MenuRight.tsx b/superset-frontend/src/views/components/MenuRight.tsx index 8222a978c008b..fe55e832b7338 100644 --- 
a/superset-frontend/src/views/components/MenuRight.tsx +++ b/superset-frontend/src/views/components/MenuRight.tsx @@ -26,6 +26,7 @@ import { useSelector } from 'react-redux'; import { UserWithPermissionsAndRoles } from 'src/types/bootstrapTypes'; import LanguagePicker from './LanguagePicker'; import DatabaseModal from '../CRUD/data/database/DatabaseModal'; +import { checkUploadExtensions } from '../CRUD/utils'; import { ExtentionConfigs, GlobalMenuDataOptions, @@ -75,6 +76,7 @@ const RightMenu = ({ CSV_EXTENSIONS, COLUMNAR_EXTENSIONS, EXCEL_EXTENSIONS, + ALLOWED_EXTENSIONS, HAS_GSHEETS_INSTALLED, } = useSelector(state => state.common.conf); @@ -188,7 +190,9 @@ const RightMenu = ({ title={menuIconAndLabel(menu)} > {menu.childs.map((item, idx) => - typeof item !== 'string' && item.name && item.perm ? ( + typeof item !== 'string' && + item.name && + checkUploadExtensions(item.perm, ALLOWED_EXTENSIONS) ? ( <> {idx === 2 && } diff --git a/superset-frontend/src/views/components/types.ts b/superset-frontend/src/views/components/types.ts index 0eff33390a727..d46c2e1619132 100644 --- a/superset-frontend/src/views/components/types.ts +++ b/superset-frontend/src/views/components/types.ts @@ -20,9 +20,10 @@ import { NavBarProps, MenuObjectProps } from './Menu'; export interface ExtentionConfigs { - CSV_EXTENSIONS: boolean; - COLUMNAR_EXTENSIONS: boolean; - EXCEL_EXTENSIONS: boolean; + ALLOWED_EXTENSIONS: Array; + CSV_EXTENSIONS: Array; + COLUMNAR_EXTENSIONS: Array; + EXCEL_EXTENSIONS: Array; HAS_GSHEETS_INSTALLED: boolean; } export interface RightMenuProps { diff --git a/superset/views/base.py b/superset/views/base.py index 067e8d74a727a..1249bc43cc4fb 100644 --- a/superset/views/base.py +++ b/superset/views/base.py @@ -104,6 +104,10 @@ "GLOBAL_ASYNC_QUERIES_WEBSOCKET_URL", "DASHBOARD_AUTO_REFRESH_MODE", "SCHEDULED_QUERIES", + "EXCEL_EXTENSIONS", + "CSV_EXTENSIONS", + "COLUMNAR_EXTENSIONS", + "ALLOWED_EXTENSIONS", ) logger = logging.getLogger(__name__) @@ -347,16 +351,10 @@ 
def common_bootstrap_payload() -> Dict[str, Any]: locale = str(get_locale()) # should not expose API TOKEN to frontend - frontend_config = {k: conf.get(k) for k in FRONTEND_CONF_KEYS} - frontend_config["EXCEL_EXTENSIONS"] = bool( - bool(conf["EXCEL_EXTENSIONS"].intersection(conf["ALLOWED_EXTENSIONS"])), - ) - frontend_config["CSV_EXTENSIONS"] = bool( - bool(conf["CSV_EXTENSIONS"].intersection(conf["ALLOWED_EXTENSIONS"])), - ) - frontend_config["COLUMNAR_EXTENSIONS"] = bool( - bool(conf["COLUMNAR_EXTENSIONS"].intersection(conf["ALLOWED_EXTENSIONS"])), - ) + frontend_config = { + k: (list(conf.get(k)) if isinstance(conf.get(k), set) else conf.get(k)) + for k in FRONTEND_CONF_KEYS + } if conf.get("SLACK_API_TOKEN"): frontend_config["ALERT_REPORTS_NOTIFICATION_METHODS"] = [ From 0db49b1a7c0243beda9aadf146065d2ce999085e Mon Sep 17 00:00:00 2001 From: Smart-Codi Date: Fri, 25 Feb 2022 06:16:31 +0000 Subject: [PATCH 11/37] fix Metadata browser in SQL not render nicely in Safari (#18855) --- .../src/SqlLab/components/ColumnElement/index.tsx | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/superset-frontend/src/SqlLab/components/ColumnElement/index.tsx b/superset-frontend/src/SqlLab/components/ColumnElement/index.tsx index 9dc0583b423de..481c1067f13fc 100644 --- a/superset-frontend/src/SqlLab/components/ColumnElement/index.tsx +++ b/superset-frontend/src/SqlLab/components/ColumnElement/index.tsx @@ -75,6 +75,10 @@ interface ColumnElementProps { }; } +const NowrapDiv = styled.div` + white-space: nowrap; +`; + const ColumnElement = ({ column }: ColumnElementProps) => { let columnName: React.ReactNode = column.name; let icons; @@ -105,9 +109,9 @@ const ColumnElement = ({ column }: ColumnElementProps) => { {columnName} {icons} -
+ {column.type} -
+ ); }; From 4923256b01305bb327be5bd52eab26234baa174f Mon Sep 17 00:00:00 2001 From: Smart-Codi Date: Fri, 25 Feb 2022 06:20:01 +0000 Subject: [PATCH 12/37] fix: Monospacing errors in dashboards & charts (#18796) * fix: Monospacing errors in dashboards & charts * removed unnecessary styling --- superset-frontend/src/chart/Chart.jsx | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/superset-frontend/src/chart/Chart.jsx b/superset-frontend/src/chart/Chart.jsx index 6e1003f79a1cf..7fb5f47808f7c 100644 --- a/superset-frontend/src/chart/Chart.jsx +++ b/superset-frontend/src/chart/Chart.jsx @@ -113,6 +113,14 @@ const RefreshOverlayWrapper = styled.div` justify-content: center; `; +const MonospaceDiv = styled.div` + font-family: ${({ theme }) => theme.typography.families.monospace}; + white-space: pre; + word-break: break-word; + overflow-x: auto; + white-space: pre-wrap; +`; + class Chart extends React.PureComponent { constructor(props) { super(props); @@ -224,7 +232,7 @@ class Chart extends React.PureComponent { key={chartId} chartId={chartId} error={error} - subtitle={message} + subtitle={{message}} copyText={message} link={queryResponse ? queryResponse.link : null} source={dashboardId ? 
'dashboard' : 'explore'} From 2bd8d3b3491aae58738ec403d092bf936a208244 Mon Sep 17 00:00:00 2001 From: Jinghuayao <81597121+jinghua-qa@users.noreply.github.com> Date: Fri, 25 Feb 2022 00:27:26 -0800 Subject: [PATCH 13/37] test(Native Filter): User can create parent filters using "Filter is hierarchical (#18915) * add new native test for parent filter --- .../dashboard/nativeFilters.test.ts | 114 +++++++++++++++++- .../cypress/support/directories.ts | 5 + 2 files changed, 118 insertions(+), 1 deletion(-) diff --git a/superset-frontend/cypress-base/cypress/integration/dashboard/nativeFilters.test.ts b/superset-frontend/cypress-base/cypress/integration/dashboard/nativeFilters.test.ts index 414f14df5c598..91f39f581c33e 100644 --- a/superset-frontend/cypress-base/cypress/integration/dashboard/nativeFilters.test.ts +++ b/superset-frontend/cypress-base/cypress/integration/dashboard/nativeFilters.test.ts @@ -22,7 +22,11 @@ import { nativeFilters, exploreView, } from 'cypress/support/directories'; -import { testItems } from './dashboard.helper'; +import { + testItems, + WORLD_HEALTH_CHARTS, + waitForChartLoad, +} from './dashboard.helper'; import { DASHBOARD_LIST } from '../dashboard_list/dashboard_list.helper'; import { CHART_LIST } from '../chart_list/chart_list.helper'; import { FORM_DATA_DEFAULTS } from '../explore/visualizations/shared.helper'; @@ -542,6 +546,114 @@ describe('Nativefilters Sanity test', () => { .contains('year') .should('be.visible'); }); + it('User can create parent filters using "Filter is hierarchical"', () => { + cy.get(nativeFilters.filterFromDashboardView.expand).click({ force: true }); + // Create region filter + cy.get(nativeFilters.filterFromDashboardView.createFilterButton) + .should('be.visible') + .click(); + cy.get(nativeFilters.filtersPanel.filterTypeInput) + .find(nativeFilters.filtersPanel.filterTypeItem) + .click({ force: true }); + cy.get('[label="Value"]').click(); + cy.get(nativeFilters.modal.container) + 
.find(nativeFilters.filtersPanel.datasetName) + .click({ force: true }) + .within(() => + cy + .get('input') + .type('wb_health_population{enter}', { delay: 50, force: true }), + ); + cy.get(nativeFilters.modal.container) + .find(nativeFilters.filtersPanel.filterName) + .click({ force: true }) + .clear() + .type('region', { scrollBehavior: false, force: true }); + cy.wait(3000); + cy.get(nativeFilters.silentLoading).should('not.exist'); + cy.get(nativeFilters.filtersPanel.filterInfoInput) + .last() + .should('be.visible') + .click({ force: true }); + cy.get(nativeFilters.filtersPanel.filterInfoInput).last().type('region'); + cy.get(nativeFilters.filtersPanel.inputDropdown) + .last() + .should('be.visible', { timeout: 20000 }) + .click({ force: true }); + // Create country filter + cy.get(nativeFilters.addFilterButton.button) + .first() + .click({ force: true }) + .then(() => { + cy.get(nativeFilters.addFilterButton.dropdownItem) + .contains('Filter') + .click({ force: true }); + }); + cy.get(nativeFilters.filtersPanel.filterTypeInput) + .find(nativeFilters.filtersPanel.filterTypeItem) + .last() + .click({ force: true }); + cy.get('[label="Value"]').last().click({ force: true }); + cy.get(nativeFilters.modal.container) + .find(nativeFilters.filtersPanel.datasetName) + .last() + .click({ scrollBehavior: false }) + .within(() => + cy + .get('input') + .clear({ force: true }) + .type('wb_health_population{enter}', { + delay: 50, + force: true, + scrollBehavior: false, + }), + ); + cy.get(nativeFilters.filtersPanel.filterInfoInput) + .last() + .should('be.visible') + .click({ force: true }); + cy.get(nativeFilters.filtersPanel.inputDropdown) + .should('be.visible', { timeout: 20000 }) + .last() + .click(); + cy.get(nativeFilters.modal.container) + .find(nativeFilters.filtersPanel.filterName) + .last() + .click({ force: true }) + .type('country', { scrollBehavior: false, force: true }); + cy.get(nativeFilters.silentLoading).should('not.exist'); + 
cy.get(nativeFilters.filtersPanel.filterInfoInput) + .last() + .click({ force: true }); + cy.get(nativeFilters.filtersPanel.filterInfoInput) + .last() + .type('country_name', { delay: 50, scrollBehavior: false, force: true }); + cy.get(nativeFilters.filtersPanel.inputDropdown) + .last() + .should('be.visible', { timeout: 20000 }) + .click({ force: true }); + // Setup parent filter + cy.get(nativeFilters.filterConfigurationSections.displayedSection).within( + () => { + cy.contains('Filter is hierarchical').should('be.visible').click(); + cy.wait(1000); + cy.get(nativeFilters.filterConfigurationSections.parentFilterInput) + .click() + .type('region{enter}', { delay: 30 }); + }, + ); + cy.get(nativeFilters.modal.footer) + .contains('Save') + .should('be.visible') + .click(); + WORLD_HEALTH_CHARTS.forEach(waitForChartLoad); + // assert that native filter is created + cy.get(nativeFilters.filterFromDashboardView.filterName) + .should('be.visible') + .contains('region'); + cy.get(nativeFilters.filterIcon).click(); + cy.contains('Select parent filters (2)').should('be.visible'); + }); }); xdescribe('Nativefilters', () => { diff --git a/superset-frontend/cypress-base/cypress/support/directories.ts b/superset-frontend/cypress-base/cypress/support/directories.ts index b68ec36c74bfb..d7755590b3d6c 100644 --- a/superset-frontend/cypress-base/cypress/support/directories.ts +++ b/superset-frontend/cypress-base/cypress/support/directories.ts @@ -327,6 +327,10 @@ export const nativeFilters = { addFilter: dataTestLocator('add-filter-button'), defaultValueCheck: '.ant-checkbox-checked', }, + addFilterButton: { + button: `.ant-modal-content [data-test="new-dropdown-icon"]`, + dropdownItem: '.ant-dropdown-menu-item', + }, filtersPanel: { filterName: dataTestLocator('filters-config-modal__name-input'), datasetName: dataTestLocator('filters-config-modal__datasource-input'), @@ -350,6 +354,7 @@ export const nativeFilters = { removeFilter: '[aria-label="remove"]', silentLoading: '.loading 
inline-centered css-101mkpk', filterConfigurationSections: { + displayedSection: 'div[style="height: 100%; overflow-y: auto;"]', collapseExpandButton: '.ant-collapse-arrow', checkedCheckbox: '.ant-checkbox-wrapper-checked', infoTooltip: '[aria-label="Show info tooltip"]', From 0b1bc5b5de4a71e332d9ea6deaca6c1967042406 Mon Sep 17 00:00:00 2001 From: Stephen Liu <750188453@qq.com> Date: Fri, 25 Feb 2022 17:13:36 +0800 Subject: [PATCH 14/37] fix(native-filter): fix required filters of a dashboard won't load properly (#18695) --- .../dashboard/components/nativeFilters/FilterBar/index.tsx | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/superset-frontend/src/dashboard/components/nativeFilters/FilterBar/index.tsx b/superset-frontend/src/dashboard/components/nativeFilters/FilterBar/index.tsx index 3ae365007a113..6aef6e3069a7e 100644 --- a/superset-frontend/src/dashboard/components/nativeFilters/FilterBar/index.tsx +++ b/superset-frontend/src/dashboard/components/nativeFilters/FilterBar/index.tsx @@ -262,10 +262,13 @@ const FilterBar: React.FC = ({ if (previousFilters) { const updates = {}; Object.values(filters).forEach(currentFilter => { + const previousFilter = previousFilters?.[currentFilter.id]; + if (!previousFilter) { + return; + } const currentType = currentFilter.filterType; const currentTargets = currentFilter.targets; const currentDataMask = currentFilter.defaultDataMask; - const previousFilter = previousFilters?.[currentFilter.id]; const previousType = previousFilter?.filterType; const previousTargets = previousFilter?.targets; const previousDataMask = previousFilter?.defaultDataMask; From 2cb3635256ee8e91f0bac2f3091684673c04ff2b Mon Sep 17 00:00:00 2001 From: Stephen Liu <750188453@qq.com> Date: Fri, 25 Feb 2022 18:26:49 +0800 Subject: [PATCH 15/37] fix(deck.gl): multiple layers map size is shrunk (#18939) --- .../plugins/legacy-preset-chart-deckgl/src/Multi/Multi.jsx | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git 
a/superset-frontend/plugins/legacy-preset-chart-deckgl/src/Multi/Multi.jsx b/superset-frontend/plugins/legacy-preset-chart-deckgl/src/Multi/Multi.jsx index 5af92b3a55017..e9f107ed95869 100644 --- a/superset-frontend/plugins/legacy-preset-chart-deckgl/src/Multi/Multi.jsx +++ b/superset-frontend/plugins/legacy-preset-chart-deckgl/src/Multi/Multi.jsx @@ -120,7 +120,7 @@ class DeckMulti extends React.PureComponent { }; render() { - const { payload, formData, setControlValue } = this.props; + const { payload, formData, setControlValue, height, width } = this.props; const { subSlicesLayers } = this.state; const layers = Object.values(subSlicesLayers); @@ -134,6 +134,8 @@ class DeckMulti extends React.PureComponent { mapStyle={formData.mapbox_style} setControlValue={setControlValue} onViewportChange={this.onViewportChange} + height={height} + width={width} /> ); } From fd4bd1edbc306d930d186f1650e809b7f15981d0 Mon Sep 17 00:00:00 2001 From: "Michael S. Molina" <70410625+michael-s-molina@users.noreply.github.com> Date: Fri, 25 Feb 2022 10:17:32 -0300 Subject: [PATCH 16/37] chore: Moves components out of the commons folder (#18931) * refactor: Moves components out of the commons folder * Fixes tests --- .../components/RunQueryActionButton/index.tsx | 2 +- .../SaveDatasetModal.test.tsx | 3 +- .../components/SaveDatasetModal/index.tsx | 3 +- .../src/SqlLab/components/SaveQuery/index.tsx | 3 +- .../components/ScheduleQueryButton/index.tsx | 3 +- .../src/SqlLab/components/SqlEditor/index.jsx | 21 +- .../components/TabbedSqlEditors/index.jsx | 2 +- .../src/common/components/index.tsx | 195 ------------------ .../CronPicker/CronPicker.stories.tsx | 3 +- .../Datasource/ChangeDatasourceModal.tsx | 3 +- .../src/components/DeleteModal/index.tsx | 2 +- .../components/Dropdown/Dropdown.stories.tsx | 2 +- .../src/components/Dropdown/index.tsx | 17 +- .../DropdownButton/DropdownButton.stories.tsx | 2 +- .../src/components/Input/index.tsx | 35 ++++ .../ListViewCard/ListViewCard.stories.tsx 
| 3 +- .../src/components/ListViewCard/index.tsx | 12 +- .../src/components/Menu/index.tsx | 168 +++++++++++++++ .../src/components/PopoverDropdown/index.tsx | 3 +- .../HeaderReportActionsDropdown/index.tsx | 3 +- .../dashboard/components/CssEditor/index.jsx | 3 +- .../Header/HeaderActionsDropdown/index.jsx | 3 +- .../components/PropertiesModal/index.tsx | 3 +- .../src/dashboard/components/SaveModal.tsx | 3 +- .../src/dashboard/components/SliceAdder.jsx | 2 +- .../SliceHeaderControls.test.tsx | 4 +- .../components/SliceHeaderControls/index.tsx | 3 +- .../ShareMenuItems/ShareMenuItems.test.tsx | 2 +- .../components/menu/ShareMenuItems/index.tsx | 2 +- .../FilterBar/FilterSets/FilterSetUnit.tsx | 3 +- .../FiltersConfigModal/DividerConfigForm.tsx | 2 +- .../FiltersConfigModal/FilterTitlePane.tsx | 3 +- .../FiltersConfigForm/FiltersConfigForm.tsx | 2 +- .../NativeFiltersModal.test.tsx | 3 +- .../components/DataTableControl/index.tsx | 3 +- .../components/DatasourcePanel/index.tsx | 2 +- .../ExploreAdditionalActionsMenu.test.jsx | 3 +- .../ExploreAdditionalActionsMenu/index.jsx | 3 +- .../components/ExportToCSVDropdown/index.tsx | 3 +- .../components/PropertiesModal/index.tsx | 3 +- .../src/explore/components/SaveModal.tsx | 2 +- .../components/controls/BoundsControl.tsx | 2 +- .../FormattingPopoverContent.tsx | 3 +- .../DatasourceControl.test.jsx | 2 +- .../controls/DatasourceControl/index.jsx | 3 +- .../components/AdvancedFrame.tsx | 2 +- .../components/CustomFrame.tsx | 3 +- .../DndColumnSelectPopoverTitle.jsx | 2 +- .../index.tsx | 2 +- .../components/controls/HiddenControl.tsx | 2 +- .../AdhocMetricEditPopoverTitle.jsx | 2 +- .../components/controls/TextAreaControl.jsx | 2 +- .../controls/TextAreaControl.test.jsx | 2 +- .../components/controls/TextControl/index.tsx | 2 +- .../TimeSeriesColumnControl/index.jsx | 3 +- .../VizTypeControl/VizTypeGallery.tsx | 3 +- .../AlertReportCronScheduler.test.tsx | 3 +- .../components/AlertReportCronScheduler.tsx | 3 +- 
.../src/views/CRUD/chart/ChartCard.tsx | 3 +- .../views/CRUD/dashboard/DashboardCard.tsx | 3 +- .../src/views/CRUD/welcome/SavedQueries.tsx | 3 +- .../views/components/LanguagePicker.test.tsx | 2 +- .../src/views/components/LanguagePicker.tsx | 2 +- .../src/views/components/Menu.tsx | 9 +- .../src/views/components/MenuRight.tsx | 2 +- .../src/views/components/SubMenu.tsx | 3 +- 66 files changed, 331 insertions(+), 279 deletions(-) create mode 100644 superset-frontend/src/components/Input/index.tsx create mode 100644 superset-frontend/src/components/Menu/index.tsx diff --git a/superset-frontend/src/SqlLab/components/RunQueryActionButton/index.tsx b/superset-frontend/src/SqlLab/components/RunQueryActionButton/index.tsx index 0769043862942..398ec3011008d 100644 --- a/superset-frontend/src/SqlLab/components/RunQueryActionButton/index.tsx +++ b/superset-frontend/src/SqlLab/components/RunQueryActionButton/index.tsx @@ -19,7 +19,7 @@ import React from 'react'; import { t, styled, supersetTheme } from '@superset-ui/core'; -import { Menu } from 'src/common/components'; +import { Menu } from 'src/components/Menu'; import Button, { ButtonProps } from 'src/components/Button'; import Icons from 'src/components/Icons'; import { diff --git a/superset-frontend/src/SqlLab/components/SaveDatasetModal/SaveDatasetModal.test.tsx b/superset-frontend/src/SqlLab/components/SaveDatasetModal/SaveDatasetModal.test.tsx index 9fbb757bbae88..2bd376e126b54 100644 --- a/superset-frontend/src/SqlLab/components/SaveDatasetModal/SaveDatasetModal.test.tsx +++ b/superset-frontend/src/SqlLab/components/SaveDatasetModal/SaveDatasetModal.test.tsx @@ -19,7 +19,8 @@ import React from 'react'; import { shallow } from 'enzyme'; import { Radio } from 'src/components/Radio'; -import { AutoComplete, Input } from 'src/common/components'; +import { AutoComplete } from 'src/common/components'; +import { Input } from 'src/components/Input'; import { SaveDatasetModal } from 'src/SqlLab/components/SaveDatasetModal'; 
describe('SaveDatasetModal', () => { diff --git a/superset-frontend/src/SqlLab/components/SaveDatasetModal/index.tsx b/superset-frontend/src/SqlLab/components/SaveDatasetModal/index.tsx index f1e92a73a0b18..76d272a6586da 100644 --- a/superset-frontend/src/SqlLab/components/SaveDatasetModal/index.tsx +++ b/superset-frontend/src/SqlLab/components/SaveDatasetModal/index.tsx @@ -20,7 +20,8 @@ import React, { FunctionComponent } from 'react'; import { AutoCompleteProps } from 'antd/lib/auto-complete'; import { Radio } from 'src/components/Radio'; -import { AutoComplete, Input } from 'src/common/components'; +import { AutoComplete } from 'src/common/components'; +import { Input } from 'src/components/Input'; import StyledModal from 'src/components/Modal'; import Button from 'src/components/Button'; import { styled, t } from '@superset-ui/core'; diff --git a/superset-frontend/src/SqlLab/components/SaveQuery/index.tsx b/superset-frontend/src/SqlLab/components/SaveQuery/index.tsx index 99cda810b23fc..cc00fc5aeacf2 100644 --- a/superset-frontend/src/SqlLab/components/SaveQuery/index.tsx +++ b/superset-frontend/src/SqlLab/components/SaveQuery/index.tsx @@ -17,7 +17,8 @@ * under the License. 
*/ import React, { useState, useEffect } from 'react'; -import { Row, Col, Input, TextArea } from 'src/common/components'; +import { Row, Col } from 'src/common/components'; +import { Input, TextArea } from 'src/components/Input'; import { t, styled } from '@superset-ui/core'; import Button from 'src/components/Button'; import { Form, FormItem } from 'src/components/Form'; diff --git a/superset-frontend/src/SqlLab/components/ScheduleQueryButton/index.tsx b/superset-frontend/src/SqlLab/components/ScheduleQueryButton/index.tsx index 6f9bf9b7d412e..6ad168030fb92 100644 --- a/superset-frontend/src/SqlLab/components/ScheduleQueryButton/index.tsx +++ b/superset-frontend/src/SqlLab/components/ScheduleQueryButton/index.tsx @@ -18,7 +18,8 @@ */ import React, { FunctionComponent, useState } from 'react'; import SchemaForm, { FormProps, FormValidation } from 'react-jsonschema-form'; -import { Row, Col, Input, TextArea } from 'src/common/components'; +import { Row, Col } from 'src/common/components'; +import { Input, TextArea } from 'src/components/Input'; import { t, styled } from '@superset-ui/core'; import * as chrono from 'chrono-node'; import ModalTrigger from 'src/components/ModalTrigger'; diff --git a/superset-frontend/src/SqlLab/components/SqlEditor/index.jsx b/superset-frontend/src/SqlLab/components/SqlEditor/index.jsx index 22711fb88d9e8..cbfbb41968c80 100644 --- a/superset-frontend/src/SqlLab/components/SqlEditor/index.jsx +++ b/superset-frontend/src/SqlLab/components/SqlEditor/index.jsx @@ -31,13 +31,9 @@ import StyledModal from 'src/components/Modal'; import Mousetrap from 'mousetrap'; import Button from 'src/components/Button'; import Timer from 'src/components/Timer'; -import { - Dropdown, - Menu as AntdMenu, - Menu, - Switch, - Input, -} from 'src/common/components'; +import { Dropdown, Switch } from 'src/common/components'; +import { Input } from 'src/components/Input'; +import { Menu } from 'src/components/Menu'; import Icons from 'src/components/Icons'; 
import { detectOS } from 'src/utils/common'; import { @@ -578,19 +574,16 @@ class SqlEditor extends React.PureComponent { LIMIT_DROPDOWN.push(maxRow); return ( - + {[...new Set(LIMIT_DROPDOWN)].map(limit => ( - this.setQueryLimit(limit)} - > + this.setQueryLimit(limit)}> {/* // eslint-disable-line no-use-before-define */} {this.convertToNumWithSpaces(limit)} {' '} - + ))} - + ); } diff --git a/superset-frontend/src/SqlLab/components/TabbedSqlEditors/index.jsx b/superset-frontend/src/SqlLab/components/TabbedSqlEditors/index.jsx index 252051b69d63e..cf2867eb8962c 100644 --- a/superset-frontend/src/SqlLab/components/TabbedSqlEditors/index.jsx +++ b/superset-frontend/src/SqlLab/components/TabbedSqlEditors/index.jsx @@ -20,7 +20,7 @@ import React from 'react'; import PropTypes from 'prop-types'; import { Dropdown } from 'src/components/Dropdown'; import { EditableTabs } from 'src/components/Tabs'; -import { Menu } from 'src/common/components'; +import { Menu } from 'src/components/Menu'; import { connect } from 'react-redux'; import { bindActionCreators } from 'redux'; import URI from 'urijs'; diff --git a/superset-frontend/src/common/components/index.tsx b/superset-frontend/src/common/components/index.tsx index e03194c40db38..7e5e4fdef13e4 100644 --- a/superset-frontend/src/common/components/index.tsx +++ b/superset-frontend/src/common/components/index.tsx @@ -16,16 +16,6 @@ * specific language governing permissions and limitations * under the License. */ -import React, { RefObject } from 'react'; -import { styled } from '@superset-ui/core'; -import { - Dropdown, - Menu as AntdMenu, - Input as AntdInput, - InputNumber as AntdInputNumber, - Skeleton, -} from 'antd'; -import { DropDownProps } from 'antd/lib/dropdown'; /* Antd is re-exported from here so we can override components with Emotion as needed. 
@@ -70,193 +60,8 @@ export { default as List } from 'antd/lib/list'; export type { AlertProps } from 'antd/lib/alert'; export type { SelectProps } from 'antd/lib/select'; export type { ListItemProps } from 'antd/lib/list'; - export { default as Collapse } from 'src/components/Collapse'; export { default as Badge } from 'src/components/Badge'; export { default as Card } from 'src/components/Card'; export { default as Progress } from 'src/components/ProgressBar'; - -export const MenuItem = styled(AntdMenu.Item)` - > a { - text-decoration: none; - } - - &.ant-menu-item { - height: ${({ theme }) => theme.gridUnit * 7}px; - line-height: ${({ theme }) => theme.gridUnit * 7}px; - a { - border-bottom: none; - transition: background-color ${({ theme }) => theme.transitionTiming}s; - &:after { - content: ''; - position: absolute; - bottom: -3px; - left: 50%; - width: 0; - height: 3px; - opacity: 0; - transform: translateX(-50%); - transition: all ${({ theme }) => theme.transitionTiming}s; - background-color: ${({ theme }) => theme.colors.primary.base}; - } - &:focus { - border-bottom: none; - background-color: transparent; - @media (max-width: 767px) { - background-color: ${({ theme }) => theme.colors.primary.light5}; - } - } - } - } - - &.ant-menu-item, - &.ant-dropdown-menu-item { - span[role='button'] { - display: inline-block; - width: 100%; - } - transition-duration: 0s; - } -`; - -export const StyledNav = styled(AntdMenu)` - line-height: 51px; - border: none; - - & > .ant-menu-item, - & > .ant-menu-submenu { - vertical-align: inherit; - &:hover { - color: ${({ theme }) => theme.colors.grayscale.dark1}; - } - } - - &:not(.ant-menu-dark) > .ant-menu-submenu, - &:not(.ant-menu-dark) > .ant-menu-item { - &:hover { - border-bottom: none; - } - } - - &:not(.ant-menu-dark) > .ant-menu-submenu, - &:not(.ant-menu-dark) > .ant-menu-item { - margin: 0px; - } - - & > .ant-menu-item > a { - padding: ${({ theme }) => theme.gridUnit * 4}px; - } -`; - -export const StyledSubMenu = 
styled(AntdMenu.SubMenu)` - color: ${({ theme }) => theme.colors.grayscale.dark1}; - border-bottom: none; - .ant-menu-submenu-open, - .ant-menu-submenu-active { - background-color: ${({ theme }) => theme.colors.primary.light5}; - .ant-menu-submenu-title { - color: ${({ theme }) => theme.colors.grayscale.dark1}; - background-color: ${({ theme }) => theme.colors.primary.light5}; - border-bottom: none; - margin: 0; - &:after { - opacity: 1; - width: calc(100% - 1); - } - } - } - .ant-menu-submenu-title { - position: relative; - top: ${({ theme }) => -theme.gridUnit - 3}px; - &:after { - content: ''; - position: absolute; - bottom: -3px; - left: 50%; - width: 0; - height: 3px; - opacity: 0; - transform: translateX(-50%); - transition: all ${({ theme }) => theme.transitionTiming}s; - background-color: ${({ theme }) => theme.colors.primary.base}; - } - } - .ant-menu-submenu-arrow { - top: 67%; - } - & > .ant-menu-submenu-title { - padding: 0 ${({ theme }) => theme.gridUnit * 6}px 0 - ${({ theme }) => theme.gridUnit * 3}px !important; - span[role='img'] { - position: absolute; - right: ${({ theme }) => -theme.gridUnit + -2}px; - top: ${({ theme }) => theme.gridUnit * 5.25}px; - svg { - font-size: ${({ theme }) => theme.gridUnit * 6}px; - color: ${({ theme }) => theme.colors.grayscale.base}; - } - } - & > span { - position: relative; - top: 7px; - } - &:hover { - color: ${({ theme }) => theme.colors.primary.base}; - } - } -`; - -export declare type MenuMode = - | 'vertical' - | 'vertical-left' - | 'vertical-right' - | 'horizontal' - | 'inline'; -export const Menu = Object.assign(AntdMenu, { - Item: MenuItem, -}); - -export const MainNav = Object.assign(StyledNav, { - Item: MenuItem, - SubMenu: StyledSubMenu, - Divider: AntdMenu.Divider, - ItemGroup: AntdMenu.ItemGroup, -}); - -interface ExtendedDropDownProps extends DropDownProps { - ref?: RefObject; -} - -export const Input = styled(AntdInput)` - border: 1px solid ${({ theme }) => theme.colors.secondary.light3}; - 
border-radius: ${({ theme }) => theme.borderRadius}px; -`; - -export const InputNumber = styled(AntdInputNumber)` - border: 1px solid ${({ theme }) => theme.colors.secondary.light3}; - border-radius: ${({ theme }) => theme.borderRadius}px; -`; - -export const TextArea = styled(AntdInput.TextArea)` - border: 1px solid ${({ theme }) => theme.colors.secondary.light3}; - border-radius: ${({ theme }) => theme.borderRadius}px; -`; - -// @z-index-below-dashboard-header (100) - 1 = 99 -export const NoAnimationDropdown = ( - props: ExtendedDropDownProps & { children?: React.ReactNode }, -) => ( - -); - -export const ThinSkeleton = styled(Skeleton)` - h3 { - margin: ${({ theme }) => theme.gridUnit}px 0; - } - - ul { - margin-bottom: 0; - } -`; - export { default as Icon } from '@ant-design/icons'; diff --git a/superset-frontend/src/components/CronPicker/CronPicker.stories.tsx b/superset-frontend/src/components/CronPicker/CronPicker.stories.tsx index 7f8993684fe9b..d7abdf9585699 100644 --- a/superset-frontend/src/components/CronPicker/CronPicker.stories.tsx +++ b/superset-frontend/src/components/CronPicker/CronPicker.stories.tsx @@ -17,7 +17,8 @@ * under the License. 
*/ import React, { useState, useRef, useCallback } from 'react'; -import { Input, Divider } from 'src/common/components'; +import { Divider } from 'src/common/components'; +import { Input } from 'src/components/Input'; import { CronPicker, CronError, CronProps } from '.'; export default { diff --git a/superset-frontend/src/components/Datasource/ChangeDatasourceModal.tsx b/superset-frontend/src/components/Datasource/ChangeDatasourceModal.tsx index 1a655c71e7f17..5aec5cc3a4577 100644 --- a/superset-frontend/src/components/Datasource/ChangeDatasourceModal.tsx +++ b/superset-frontend/src/components/Datasource/ChangeDatasourceModal.tsx @@ -35,7 +35,8 @@ import { useDebouncedEffect } from 'src/explore/exploreUtils'; import { SLOW_DEBOUNCE } from 'src/constants'; import { getClientErrorObject } from 'src/utils/getClientErrorObject'; import Loading from 'src/components/Loading'; -import { Input, AntdInput } from 'src/common/components'; +import { AntdInput } from 'src/common/components'; +import { Input } from 'src/components/Input'; import { PAGE_SIZE as DATASET_PAGE_SIZE, SORT_BY as DATASET_SORT_BY, diff --git a/superset-frontend/src/components/DeleteModal/index.tsx b/superset-frontend/src/components/DeleteModal/index.tsx index 97f6b469334c2..b8b577d3060f3 100644 --- a/superset-frontend/src/components/DeleteModal/index.tsx +++ b/superset-frontend/src/components/DeleteModal/index.tsx @@ -18,7 +18,7 @@ */ import { t, styled } from '@superset-ui/core'; import React, { useState } from 'react'; -import { Input } from 'src/common/components'; +import { Input } from 'src/components/Input'; import Modal from 'src/components/Modal'; import { FormLabel } from 'src/components/Form'; diff --git a/superset-frontend/src/components/Dropdown/Dropdown.stories.tsx b/superset-frontend/src/components/Dropdown/Dropdown.stories.tsx index b5ff1aafd9166..0d15b17ce4133 100644 --- a/superset-frontend/src/components/Dropdown/Dropdown.stories.tsx +++ 
b/superset-frontend/src/components/Dropdown/Dropdown.stories.tsx @@ -17,7 +17,7 @@ * under the License. */ import React from 'react'; -import { Menu } from 'src/common/components'; +import { Menu } from 'src/components/Menu'; import { Dropdown, DropdownProps } from '.'; export default { diff --git a/superset-frontend/src/components/Dropdown/index.tsx b/superset-frontend/src/components/Dropdown/index.tsx index 110b2c6a99b96..d59d4e630912c 100644 --- a/superset-frontend/src/components/Dropdown/index.tsx +++ b/superset-frontend/src/components/Dropdown/index.tsx @@ -16,8 +16,9 @@ * specific language governing permissions and limitations * under the License. */ -import React from 'react'; +import React, { RefObject } from 'react'; import { Dropdown as AntdDropdown } from 'antd'; +import { DropDownProps } from 'antd/lib/dropdown'; import { styled } from '@superset-ui/core'; const MenuDots = styled.div` @@ -76,3 +77,17 @@ export const Dropdown = ({ overlay, ...rest }: DropdownProps) => ( ); + +interface ExtendedDropDownProps extends DropDownProps { + ref?: RefObject; +} + +// @z-index-below-dashboard-header (100) - 1 = 99 +export const NoAnimationDropdown = ( + props: ExtendedDropDownProps & { children?: React.ReactNode }, +) => ( + +); diff --git a/superset-frontend/src/components/DropdownButton/DropdownButton.stories.tsx b/superset-frontend/src/components/DropdownButton/DropdownButton.stories.tsx index 5dc8727bd8ffd..eefe5b781af8d 100644 --- a/superset-frontend/src/components/DropdownButton/DropdownButton.stories.tsx +++ b/superset-frontend/src/components/DropdownButton/DropdownButton.stories.tsx @@ -17,7 +17,7 @@ * under the License. 
*/ import React from 'react'; -import { Menu } from 'src/common/components'; +import { Menu } from 'src/components/Menu'; import { DropdownButton, DropdownButtonProps } from '.'; export default { diff --git a/superset-frontend/src/components/Input/index.tsx b/superset-frontend/src/components/Input/index.tsx new file mode 100644 index 0000000000000..c1855f97aa781 --- /dev/null +++ b/superset-frontend/src/components/Input/index.tsx @@ -0,0 +1,35 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +import { styled } from '@superset-ui/core'; +import { Input as AntdInput, InputNumber as AntdInputNumber } from 'antd'; + +export const Input = styled(AntdInput)` + border: 1px solid ${({ theme }) => theme.colors.secondary.light3}; + border-radius: ${({ theme }) => theme.borderRadius}px; +`; + +export const InputNumber = styled(AntdInputNumber)` + border: 1px solid ${({ theme }) => theme.colors.secondary.light3}; + border-radius: ${({ theme }) => theme.borderRadius}px; +`; + +export const TextArea = styled(AntdInput.TextArea)` + border: 1px solid ${({ theme }) => theme.colors.secondary.light3}; + border-radius: ${({ theme }) => theme.borderRadius}px; +`; diff --git a/superset-frontend/src/components/ListViewCard/ListViewCard.stories.tsx b/superset-frontend/src/components/ListViewCard/ListViewCard.stories.tsx index b088bd6a4eee6..3e8a63879e990 100644 --- a/superset-frontend/src/components/ListViewCard/ListViewCard.stories.tsx +++ b/superset-frontend/src/components/ListViewCard/ListViewCard.stories.tsx @@ -21,7 +21,8 @@ import { action } from '@storybook/addon-actions'; import { withKnobs, boolean, select, text } from '@storybook/addon-knobs'; import DashboardImg from 'src/assets/images/dashboard-card-fallback.svg'; import ChartImg from 'src/assets/images/chart-card-fallback.svg'; -import { Dropdown, Menu } from 'src/common/components'; +import { Dropdown } from 'src/common/components'; +import { Menu } from 'src/components/Menu'; import Icons from 'src/components/Icons'; import FaveStar from 'src/components/FaveStar'; import ListViewCard from '.'; diff --git a/superset-frontend/src/components/ListViewCard/index.tsx b/superset-frontend/src/components/ListViewCard/index.tsx index 6f0e822069d76..0402a484900fe 100644 --- a/superset-frontend/src/components/ListViewCard/index.tsx +++ b/superset-frontend/src/components/ListViewCard/index.tsx @@ -18,7 +18,7 @@ */ import React from 'react'; import { styled, useTheme } from '@superset-ui/core'; -import { AntdCard, 
Skeleton, ThinSkeleton } from 'src/common/components'; +import { AntdCard, Skeleton } from 'src/common/components'; import { Tooltip } from 'src/components/Tooltip'; import ImageLoader, { BackgroundPosition } from './ImageLoader'; import CertifiedBadge from '../CertifiedBadge'; @@ -136,6 +136,16 @@ const CoverFooterRight = styled.div` text-overflow: ellipsis; `; +const ThinSkeleton = styled(Skeleton)` + h3 { + margin: ${({ theme }) => theme.gridUnit}px 0; + } + + ul { + margin-bottom: 0; + } +`; + const paragraphConfig = { rows: 1, width: 150 }; interface LinkProps { diff --git a/superset-frontend/src/components/Menu/index.tsx b/superset-frontend/src/components/Menu/index.tsx new file mode 100644 index 0000000000000..988f9160c6ad0 --- /dev/null +++ b/superset-frontend/src/components/Menu/index.tsx @@ -0,0 +1,168 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +import { styled } from '@superset-ui/core'; +import { Menu as AntdMenu } from 'antd'; + +const MenuItem = styled(AntdMenu.Item)` + > a { + text-decoration: none; + } + + &.ant-menu-item { + height: ${({ theme }) => theme.gridUnit * 7}px; + line-height: ${({ theme }) => theme.gridUnit * 7}px; + a { + border-bottom: none; + transition: background-color ${({ theme }) => theme.transitionTiming}s; + &:after { + content: ''; + position: absolute; + bottom: -3px; + left: 50%; + width: 0; + height: 3px; + opacity: 0; + transform: translateX(-50%); + transition: all ${({ theme }) => theme.transitionTiming}s; + background-color: ${({ theme }) => theme.colors.primary.base}; + } + &:focus { + border-bottom: none; + background-color: transparent; + @media (max-width: 767px) { + background-color: ${({ theme }) => theme.colors.primary.light5}; + } + } + } + } + + &.ant-menu-item, + &.ant-dropdown-menu-item { + span[role='button'] { + display: inline-block; + width: 100%; + } + transition-duration: 0s; + } +`; + +const StyledNav = styled(AntdMenu)` + line-height: 51px; + border: none; + + & > .ant-menu-item, + & > .ant-menu-submenu { + vertical-align: inherit; + &:hover { + color: ${({ theme }) => theme.colors.grayscale.dark1}; + } + } + + &:not(.ant-menu-dark) > .ant-menu-submenu, + &:not(.ant-menu-dark) > .ant-menu-item { + &:hover { + border-bottom: none; + } + } + + &:not(.ant-menu-dark) > .ant-menu-submenu, + &:not(.ant-menu-dark) > .ant-menu-item { + margin: 0px; + } + + & > .ant-menu-item > a { + padding: ${({ theme }) => theme.gridUnit * 4}px; + } +`; + +const StyledSubMenu = styled(AntdMenu.SubMenu)` + color: ${({ theme }) => theme.colors.grayscale.dark1}; + border-bottom: none; + .ant-menu-submenu-open, + .ant-menu-submenu-active { + background-color: ${({ theme }) => theme.colors.primary.light5}; + .ant-menu-submenu-title { + color: ${({ theme }) => theme.colors.grayscale.dark1}; + background-color: ${({ theme }) => theme.colors.primary.light5}; + border-bottom: 
none; + margin: 0; + &:after { + opacity: 1; + width: calc(100% - 1); + } + } + } + .ant-menu-submenu-title { + position: relative; + top: ${({ theme }) => -theme.gridUnit - 3}px; + &:after { + content: ''; + position: absolute; + bottom: -3px; + left: 50%; + width: 0; + height: 3px; + opacity: 0; + transform: translateX(-50%); + transition: all ${({ theme }) => theme.transitionTiming}s; + background-color: ${({ theme }) => theme.colors.primary.base}; + } + } + .ant-menu-submenu-arrow { + top: 67%; + } + & > .ant-menu-submenu-title { + padding: 0 ${({ theme }) => theme.gridUnit * 6}px 0 + ${({ theme }) => theme.gridUnit * 3}px !important; + span[role='img'] { + position: absolute; + right: ${({ theme }) => -theme.gridUnit + -2}px; + top: ${({ theme }) => theme.gridUnit * 5.25}px; + svg { + font-size: ${({ theme }) => theme.gridUnit * 6}px; + color: ${({ theme }) => theme.colors.grayscale.base}; + } + } + & > span { + position: relative; + top: 7px; + } + &:hover { + color: ${({ theme }) => theme.colors.primary.base}; + } + } +`; + +export declare type MenuMode = + | 'vertical' + | 'vertical-left' + | 'vertical-right' + | 'horizontal' + | 'inline'; + +export const Menu = Object.assign(AntdMenu, { + Item: MenuItem, +}); + +export const MainNav = Object.assign(StyledNav, { + Item: MenuItem, + SubMenu: StyledSubMenu, + Divider: AntdMenu.Divider, + ItemGroup: AntdMenu.ItemGroup, +}); diff --git a/superset-frontend/src/components/PopoverDropdown/index.tsx b/superset-frontend/src/components/PopoverDropdown/index.tsx index 1f3f1747bae13..d035da88c5681 100644 --- a/superset-frontend/src/components/PopoverDropdown/index.tsx +++ b/superset-frontend/src/components/PopoverDropdown/index.tsx @@ -19,7 +19,8 @@ import React from 'react'; import cx from 'classnames'; import { styled, useTheme } from '@superset-ui/core'; -import { Dropdown, Menu } from 'src/common/components'; +import { Dropdown } from 'src/common/components'; +import { Menu } from 'src/components/Menu'; import 
Icons from 'src/components/Icons'; export interface OptionProps { diff --git a/superset-frontend/src/components/ReportModal/HeaderReportActionsDropdown/index.tsx b/superset-frontend/src/components/ReportModal/HeaderReportActionsDropdown/index.tsx index 6fc39283600b1..53de9e4d8c36b 100644 --- a/superset-frontend/src/components/ReportModal/HeaderReportActionsDropdown/index.tsx +++ b/superset-frontend/src/components/ReportModal/HeaderReportActionsDropdown/index.tsx @@ -22,7 +22,8 @@ import { t, SupersetTheme, css, useTheme } from '@superset-ui/core'; import Icons from 'src/components/Icons'; import { Switch } from 'src/components/Switch'; import { AlertObject } from 'src/views/CRUD/alert/types'; -import { Menu, NoAnimationDropdown } from 'src/common/components'; +import { Menu } from 'src/components/Menu'; +import { NoAnimationDropdown } from 'src/components/Dropdown'; import { isFeatureEnabled, FeatureFlag } from 'src/featureFlags'; import DeleteModal from 'src/components/DeleteModal'; diff --git a/superset-frontend/src/dashboard/components/CssEditor/index.jsx b/superset-frontend/src/dashboard/components/CssEditor/index.jsx index 995dc081fe2b2..b18130cb81172 100644 --- a/superset-frontend/src/dashboard/components/CssEditor/index.jsx +++ b/superset-frontend/src/dashboard/components/CssEditor/index.jsx @@ -18,7 +18,8 @@ */ import React from 'react'; import PropTypes from 'prop-types'; -import { Menu, Dropdown } from 'src/common/components'; +import { Dropdown } from 'src/common/components'; +import { Menu } from 'src/components/Menu'; import Button from 'src/components/Button'; import { t, styled } from '@superset-ui/core'; import ModalTrigger from 'src/components/ModalTrigger'; diff --git a/superset-frontend/src/dashboard/components/Header/HeaderActionsDropdown/index.jsx b/superset-frontend/src/dashboard/components/Header/HeaderActionsDropdown/index.jsx index ef7fd17d22de0..b86af3da8a89d 100644 --- 
a/superset-frontend/src/dashboard/components/Header/HeaderActionsDropdown/index.jsx +++ b/superset-frontend/src/dashboard/components/Header/HeaderActionsDropdown/index.jsx @@ -21,7 +21,8 @@ import PropTypes from 'prop-types'; import { styled, SupersetClient, t } from '@superset-ui/core'; -import { Menu, NoAnimationDropdown } from 'src/common/components'; +import { Menu } from 'src/components/Menu'; +import { NoAnimationDropdown } from 'src/components/Dropdown'; import Icons from 'src/components/Icons'; import { URL_PARAMS } from 'src/constants'; import ShareMenuItems from 'src/dashboard/components/menu/ShareMenuItems'; diff --git a/superset-frontend/src/dashboard/components/PropertiesModal/index.tsx b/superset-frontend/src/dashboard/components/PropertiesModal/index.tsx index 41d0850035a0e..335693c11efd9 100644 --- a/superset-frontend/src/dashboard/components/PropertiesModal/index.tsx +++ b/superset-frontend/src/dashboard/components/PropertiesModal/index.tsx @@ -17,7 +17,8 @@ * under the License. 
*/ import React, { useCallback, useEffect, useState } from 'react'; -import { Form, Row, Col, Input } from 'src/common/components'; +import { Form, Row, Col } from 'src/common/components'; +import { Input } from 'src/components/Input'; import { FormItem } from 'src/components/Form'; import jsonStringify from 'json-stringify-pretty-compact'; import Button from 'src/components/Button'; diff --git a/superset-frontend/src/dashboard/components/SaveModal.tsx b/superset-frontend/src/dashboard/components/SaveModal.tsx index bf3e932e203fc..ad30962ac4eac 100644 --- a/superset-frontend/src/dashboard/components/SaveModal.tsx +++ b/superset-frontend/src/dashboard/components/SaveModal.tsx @@ -19,7 +19,8 @@ /* eslint-env browser */ import React from 'react'; import { Radio } from 'src/components/Radio'; -import { RadioChangeEvent, Input } from 'src/common/components'; +import { RadioChangeEvent } from 'src/common/components'; +import { Input } from 'src/components/Input'; import Button from 'src/components/Button'; import { t, JsonResponse } from '@superset-ui/core'; diff --git a/superset-frontend/src/dashboard/components/SliceAdder.jsx b/superset-frontend/src/dashboard/components/SliceAdder.jsx index d1caf980fb9d2..8c2fc920e9b20 100644 --- a/superset-frontend/src/dashboard/components/SliceAdder.jsx +++ b/superset-frontend/src/dashboard/components/SliceAdder.jsx @@ -22,7 +22,7 @@ import PropTypes from 'prop-types'; import { List } from 'react-virtualized'; import { createFilter } from 'react-search-input'; import { t, styled, isFeatureEnabled, FeatureFlag } from '@superset-ui/core'; -import { Input } from 'src/common/components'; +import { Input } from 'src/components/Input'; import { Select } from 'src/components'; import Loading from 'src/components/Loading'; import { diff --git a/superset-frontend/src/dashboard/components/SliceHeaderControls/SliceHeaderControls.test.tsx b/superset-frontend/src/dashboard/components/SliceHeaderControls/SliceHeaderControls.test.tsx index 
3f78c01439eb2..c5a19de74a83e 100644 --- a/superset-frontend/src/dashboard/components/SliceHeaderControls/SliceHeaderControls.test.tsx +++ b/superset-frontend/src/dashboard/components/SliceHeaderControls/SliceHeaderControls.test.tsx @@ -23,8 +23,8 @@ import { render, screen } from 'spec/helpers/testing-library'; import { FeatureFlag } from 'src/featureFlags'; import SliceHeaderControls from '.'; -jest.mock('src/common/components', () => { - const original = jest.requireActual('src/common/components'); +jest.mock('src/components/Dropdown', () => { + const original = jest.requireActual('src/components/Dropdown'); return { ...original, NoAnimationDropdown: (props: any) => ( diff --git a/superset-frontend/src/dashboard/components/SliceHeaderControls/index.tsx b/superset-frontend/src/dashboard/components/SliceHeaderControls/index.tsx index f39eda24272ea..099ec10132235 100644 --- a/superset-frontend/src/dashboard/components/SliceHeaderControls/index.tsx +++ b/superset-frontend/src/dashboard/components/SliceHeaderControls/index.tsx @@ -24,7 +24,8 @@ import { styled, t, } from '@superset-ui/core'; -import { Menu, NoAnimationDropdown } from 'src/common/components'; +import { Menu } from 'src/components/Menu'; +import { NoAnimationDropdown } from 'src/components/Dropdown'; import ShareMenuItems from 'src/dashboard/components/menu/ShareMenuItems'; import downloadAsImage from 'src/utils/downloadAsImage'; import { FeatureFlag, isFeatureEnabled } from 'src/featureFlags'; diff --git a/superset-frontend/src/dashboard/components/menu/ShareMenuItems/ShareMenuItems.test.tsx b/superset-frontend/src/dashboard/components/menu/ShareMenuItems/ShareMenuItems.test.tsx index bdf22009efda5..da7d196bd8b50 100644 --- a/superset-frontend/src/dashboard/components/menu/ShareMenuItems/ShareMenuItems.test.tsx +++ b/superset-frontend/src/dashboard/components/menu/ShareMenuItems/ShareMenuItems.test.tsx @@ -18,7 +18,7 @@ */ import React from 'react'; -import { Menu } from 'src/common/components'; 
+import { Menu } from 'src/components/Menu'; import { render, screen, waitFor } from 'spec/helpers/testing-library'; import userEvent from '@testing-library/user-event'; import * as copyTextToClipboard from 'src/utils/copy'; diff --git a/superset-frontend/src/dashboard/components/menu/ShareMenuItems/index.tsx b/superset-frontend/src/dashboard/components/menu/ShareMenuItems/index.tsx index 9013369d7ac71..cb31503ac8611 100644 --- a/superset-frontend/src/dashboard/components/menu/ShareMenuItems/index.tsx +++ b/superset-frontend/src/dashboard/components/menu/ShareMenuItems/index.tsx @@ -20,7 +20,7 @@ import React from 'react'; import { useUrlShortener } from 'src/hooks/useUrlShortener'; import copyTextToClipboard from 'src/utils/copy'; import { t, logging } from '@superset-ui/core'; -import { Menu } from 'src/common/components'; +import { Menu } from 'src/components/Menu'; import { getUrlParam } from 'src/utils/urlUtils'; import { postFormData } from 'src/explore/exploreUtils/formData'; import { useTabId } from 'src/hooks/useTabId'; diff --git a/superset-frontend/src/dashboard/components/nativeFilters/FilterBar/FilterSets/FilterSetUnit.tsx b/superset-frontend/src/dashboard/components/nativeFilters/FilterBar/FilterSets/FilterSetUnit.tsx index 00b3c9a9f32c0..e9aed5d0d9cd8 100644 --- a/superset-frontend/src/dashboard/components/nativeFilters/FilterBar/FilterSets/FilterSetUnit.tsx +++ b/superset-frontend/src/dashboard/components/nativeFilters/FilterBar/FilterSets/FilterSetUnit.tsx @@ -16,7 +16,8 @@ * specific language governing permissions and limitations * under the License. 
*/ -import { Typography, Dropdown, Menu } from 'src/common/components'; +import { Typography, Dropdown } from 'src/common/components'; +import { Menu } from 'src/components/Menu'; import React, { FC } from 'react'; import { DataMaskState, diff --git a/superset-frontend/src/dashboard/components/nativeFilters/FiltersConfigModal/DividerConfigForm.tsx b/superset-frontend/src/dashboard/components/nativeFilters/FiltersConfigModal/DividerConfigForm.tsx index afef49684fdf7..6db4db853e2e9 100644 --- a/superset-frontend/src/dashboard/components/nativeFilters/FiltersConfigModal/DividerConfigForm.tsx +++ b/superset-frontend/src/dashboard/components/nativeFilters/FiltersConfigModal/DividerConfigForm.tsx @@ -18,7 +18,7 @@ */ import React from 'react'; import { FormItem } from 'src/components/Form'; -import { Input, TextArea } from 'src/common/components'; +import { Input, TextArea } from 'src/components/Input'; import { NativeFilterType, styled, t } from '@superset-ui/core'; interface Props { diff --git a/superset-frontend/src/dashboard/components/nativeFilters/FiltersConfigModal/FilterTitlePane.tsx b/superset-frontend/src/dashboard/components/nativeFilters/FiltersConfigModal/FilterTitlePane.tsx index 756a3e0e9a8a7..d9981cbe6f04c 100644 --- a/superset-frontend/src/dashboard/components/nativeFilters/FiltersConfigModal/FilterTitlePane.tsx +++ b/superset-frontend/src/dashboard/components/nativeFilters/FiltersConfigModal/FilterTitlePane.tsx @@ -18,7 +18,8 @@ */ import { NativeFilterType, styled, t, useTheme } from '@superset-ui/core'; import React from 'react'; -import { Dropdown, MainNav as Menu } from 'src/common/components'; +import { Dropdown } from 'src/common/components'; +import { MainNav as Menu } from 'src/components/Menu'; import FilterTitleContainer from './FilterTitleContainer'; import { FilterRemoval } from './types'; diff --git a/superset-frontend/src/dashboard/components/nativeFilters/FiltersConfigModal/FiltersConfigForm/FiltersConfigForm.tsx 
b/superset-frontend/src/dashboard/components/nativeFilters/FiltersConfigModal/FiltersConfigForm/FiltersConfigForm.tsx index 97e1d6853e1b4..d698c9214c5ad 100644 --- a/superset-frontend/src/dashboard/components/nativeFilters/FiltersConfigModal/FiltersConfigForm/FiltersConfigForm.tsx +++ b/superset-frontend/src/dashboard/components/nativeFilters/FiltersConfigModal/FiltersConfigForm/FiltersConfigForm.tsx @@ -50,7 +50,7 @@ import React, { import { PluginFilterSelectCustomizeProps } from 'src/filters/components/Select/types'; import { useSelector } from 'react-redux'; import { getChartDataRequest } from 'src/chart/chartAction'; -import { Input, TextArea } from 'src/common/components'; +import { Input, TextArea } from 'src/components/Input'; import { Select } from 'src/components'; import Collapse from 'src/components/Collapse'; import BasicErrorAlert from 'src/components/ErrorMessage/BasicErrorAlert'; diff --git a/superset-frontend/src/dashboard/components/nativeFilters/FiltersConfigModal/NativeFiltersModal.test.tsx b/superset-frontend/src/dashboard/components/nativeFilters/FiltersConfigModal/NativeFiltersModal.test.tsx index 51c119653bab9..ebe202e081033 100644 --- a/superset-frontend/src/dashboard/components/nativeFilters/FiltersConfigModal/NativeFiltersModal.test.tsx +++ b/superset-frontend/src/dashboard/components/nativeFilters/FiltersConfigModal/NativeFiltersModal.test.tsx @@ -25,7 +25,8 @@ import { Provider } from 'react-redux'; import { mockStore } from 'spec/fixtures/mockStore'; import { styledMount as mount } from 'spec/helpers/theming'; import waitForComponentToPaint from 'spec/helpers/waitForComponentToPaint'; -import { Dropdown, Menu } from 'src/common/components'; +import { Dropdown } from 'src/common/components'; +import { Menu } from 'src/components/Menu'; import Alert from 'src/components/Alert'; import { FiltersConfigModal } from 'src/dashboard/components/nativeFilters/FiltersConfigModal/FiltersConfigModal'; diff --git 
a/superset-frontend/src/explore/components/DataTableControl/index.tsx b/superset-frontend/src/explore/components/DataTableControl/index.tsx index 68e83bc1a6478..b9e11eda950ff 100644 --- a/superset-frontend/src/explore/components/DataTableControl/index.tsx +++ b/superset-frontend/src/explore/components/DataTableControl/index.tsx @@ -30,7 +30,8 @@ import { Global } from '@emotion/react'; import { Column } from 'react-table'; import debounce from 'lodash/debounce'; import { useDispatch } from 'react-redux'; -import { Input, Space } from 'src/common/components'; +import { Space } from 'src/common/components'; +import { Input } from 'src/components/Input'; import { BOOL_FALSE_DISPLAY, BOOL_TRUE_DISPLAY, diff --git a/superset-frontend/src/explore/components/DatasourcePanel/index.tsx b/superset-frontend/src/explore/components/DatasourcePanel/index.tsx index caf8c1ff59c63..7e974d50da6d9 100644 --- a/superset-frontend/src/explore/components/DatasourcePanel/index.tsx +++ b/superset-frontend/src/explore/components/DatasourcePanel/index.tsx @@ -26,7 +26,7 @@ import { debounce } from 'lodash'; import { matchSorter, rankings } from 'match-sorter'; import { css, styled, t } from '@superset-ui/core'; import Collapse from 'src/components/Collapse'; -import { Input } from 'src/common/components'; +import { Input } from 'src/components/Input'; import { FAST_DEBOUNCE } from 'src/constants'; import { FeatureFlag, isFeatureEnabled } from 'src/featureFlags'; import { ExploreActions } from 'src/explore/actions/exploreActions'; diff --git a/superset-frontend/src/explore/components/ExploreAdditionalActionsMenu/ExploreAdditionalActionsMenu.test.jsx b/superset-frontend/src/explore/components/ExploreAdditionalActionsMenu/ExploreAdditionalActionsMenu.test.jsx index b40a8a8e8f5fe..b2c03ac032dab 100644 --- a/superset-frontend/src/explore/components/ExploreAdditionalActionsMenu/ExploreAdditionalActionsMenu.test.jsx +++ 
b/superset-frontend/src/explore/components/ExploreAdditionalActionsMenu/ExploreAdditionalActionsMenu.test.jsx @@ -21,7 +21,8 @@ import { shallow } from 'enzyme'; import { styledMount as mount } from 'spec/helpers/theming'; import thunk from 'redux-thunk'; import configureStore from 'redux-mock-store'; -import { Dropdown, Menu } from 'src/common/components'; +import { Dropdown } from 'src/common/components'; +import { Menu } from 'src/components/Menu'; import ExploreAdditionalActionsMenu from 'src/explore/components/ExploreAdditionalActionsMenu'; const mockStore = configureStore([thunk]); diff --git a/superset-frontend/src/explore/components/ExploreAdditionalActionsMenu/index.jsx b/superset-frontend/src/explore/components/ExploreAdditionalActionsMenu/index.jsx index 1a0c2c0a69591..7e2401abc6d48 100644 --- a/superset-frontend/src/explore/components/ExploreAdditionalActionsMenu/index.jsx +++ b/superset-frontend/src/explore/components/ExploreAdditionalActionsMenu/index.jsx @@ -21,7 +21,8 @@ import { connect } from 'react-redux'; import { bindActionCreators } from 'redux'; import PropTypes from 'prop-types'; import { t } from '@superset-ui/core'; -import { Dropdown, Menu } from 'src/common/components'; +import { Dropdown } from 'src/common/components'; +import { Menu } from 'src/components/Menu'; import downloadAsImage from 'src/utils/downloadAsImage'; import ModalTrigger from 'src/components/ModalTrigger'; import { sliceUpdated } from 'src/explore/actions/exploreActions'; diff --git a/superset-frontend/src/explore/components/ExportToCSVDropdown/index.tsx b/superset-frontend/src/explore/components/ExportToCSVDropdown/index.tsx index ec5106711ab85..05404b0a68b46 100644 --- a/superset-frontend/src/explore/components/ExportToCSVDropdown/index.tsx +++ b/superset-frontend/src/explore/components/ExportToCSVDropdown/index.tsx @@ -19,7 +19,8 @@ import React, { ReactChild, useCallback } from 'react'; import { t, styled } from '@superset-ui/core'; import Icons from 
'src/components/Icons'; -import { Dropdown, Menu } from 'src/common/components'; +import { Dropdown } from 'src/common/components'; +import { Menu } from 'src/components/Menu'; enum MENU_KEYS { EXPORT_ORIGINAL = 'export_original', diff --git a/superset-frontend/src/explore/components/PropertiesModal/index.tsx b/superset-frontend/src/explore/components/PropertiesModal/index.tsx index 8f6b69a82dca5..9ad64704fb06f 100644 --- a/superset-frontend/src/explore/components/PropertiesModal/index.tsx +++ b/superset-frontend/src/explore/components/PropertiesModal/index.tsx @@ -18,7 +18,8 @@ */ import React, { useMemo, useState, useCallback, useEffect } from 'react'; import Modal from 'src/components/Modal'; -import { Form, Row, Col, Input, TextArea } from 'src/common/components'; +import { Form, Row, Col } from 'src/common/components'; +import { Input, TextArea } from 'src/components/Input'; import Button from 'src/components/Button'; import { Select } from 'src/components'; import { SelectValue } from 'antd/lib/select'; diff --git a/superset-frontend/src/explore/components/SaveModal.tsx b/superset-frontend/src/explore/components/SaveModal.tsx index 0fe9b2f4417ad..292378290e98c 100644 --- a/superset-frontend/src/explore/components/SaveModal.tsx +++ b/superset-frontend/src/explore/components/SaveModal.tsx @@ -18,7 +18,7 @@ */ /* eslint camelcase: 0 */ import React from 'react'; -import { Input } from 'src/common/components'; +import { Input } from 'src/components/Input'; import { Form, FormItem } from 'src/components/Form'; import Alert from 'src/components/Alert'; import { JsonObject, t, styled } from '@superset-ui/core'; diff --git a/superset-frontend/src/explore/components/controls/BoundsControl.tsx b/superset-frontend/src/explore/components/controls/BoundsControl.tsx index 1565ce7d31c2c..97fd26e283e4b 100644 --- a/superset-frontend/src/explore/components/controls/BoundsControl.tsx +++ b/superset-frontend/src/explore/components/controls/BoundsControl.tsx @@ -17,7 +17,7 @@ * 
under the License. */ import React, { useEffect, useRef, useState } from 'react'; -import { InputNumber } from 'src/common/components'; +import { InputNumber } from 'src/components/Input'; import { t, styled } from '@superset-ui/core'; import { debounce } from 'lodash'; import ControlHeader from 'src/explore/components/ControlHeader'; diff --git a/superset-frontend/src/explore/components/controls/ConditionalFormattingControl/FormattingPopoverContent.tsx b/superset-frontend/src/explore/components/controls/ConditionalFormattingControl/FormattingPopoverContent.tsx index ee862b2b69732..76c738aa7a151 100644 --- a/superset-frontend/src/explore/components/controls/ConditionalFormattingControl/FormattingPopoverContent.tsx +++ b/superset-frontend/src/explore/components/controls/ConditionalFormattingControl/FormattingPopoverContent.tsx @@ -20,7 +20,8 @@ import React from 'react'; import { styled, t } from '@superset-ui/core'; import { Form, FormItem, FormProps } from 'src/components/Form'; import Select, { propertyComparator } from 'src/components/Select/Select'; -import { Col, InputNumber, Row } from 'src/common/components'; +import { Col, Row } from 'src/common/components'; +import { InputNumber } from 'src/components/Input'; import Button from 'src/components/Button'; import { COMPARATOR, diff --git a/superset-frontend/src/explore/components/controls/DatasourceControl/DatasourceControl.test.jsx b/superset-frontend/src/explore/components/controls/DatasourceControl/DatasourceControl.test.jsx index aed44f7fcb420..4248e8634cc9e 100644 --- a/superset-frontend/src/explore/components/controls/DatasourceControl/DatasourceControl.test.jsx +++ b/superset-frontend/src/explore/components/controls/DatasourceControl/DatasourceControl.test.jsx @@ -20,7 +20,7 @@ import React from 'react'; import sinon from 'sinon'; import configureStore from 'redux-mock-store'; import { shallow } from 'enzyme'; -import { Menu } from 'src/common/components'; +import { Menu } from 'src/components/Menu'; 
import { DatasourceModal, ChangeDatasourceModal, diff --git a/superset-frontend/src/explore/components/controls/DatasourceControl/index.jsx b/superset-frontend/src/explore/components/controls/DatasourceControl/index.jsx index c0dc8bb010001..4b11fe2991df4 100644 --- a/superset-frontend/src/explore/components/controls/DatasourceControl/index.jsx +++ b/superset-frontend/src/explore/components/controls/DatasourceControl/index.jsx @@ -22,7 +22,8 @@ import PropTypes from 'prop-types'; import { t, styled, supersetTheme } from '@superset-ui/core'; import { getUrlParam } from 'src/utils/urlUtils'; -import { Dropdown, Menu } from 'src/common/components'; +import { Dropdown } from 'src/common/components'; +import { Menu } from 'src/components/Menu'; import { Tooltip } from 'src/components/Tooltip'; import Icons from 'src/components/Icons'; import { diff --git a/superset-frontend/src/explore/components/controls/DateFilterControl/components/AdvancedFrame.tsx b/superset-frontend/src/explore/components/controls/DateFilterControl/components/AdvancedFrame.tsx index 607cf99202223..a727a3f261aa2 100644 --- a/superset-frontend/src/explore/components/controls/DateFilterControl/components/AdvancedFrame.tsx +++ b/superset-frontend/src/explore/components/controls/DateFilterControl/components/AdvancedFrame.tsx @@ -19,7 +19,7 @@ import React from 'react'; import { t } from '@superset-ui/core'; import { SEPARATOR } from 'src/explore/components/controls/DateFilterControl/utils'; -import { Input } from 'src/common/components'; +import { Input } from 'src/components/Input'; import { InfoTooltipWithTrigger } from '@superset-ui/chart-controls'; import { FrameComponentProps } from 'src/explore/components/controls/DateFilterControl/types'; import DateFunctionTooltip from './DateFunctionTooltip'; diff --git a/superset-frontend/src/explore/components/controls/DateFilterControl/components/CustomFrame.tsx b/superset-frontend/src/explore/components/controls/DateFilterControl/components/CustomFrame.tsx 
index a26d7ca765574..513017e2a1b08 100644 --- a/superset-frontend/src/explore/components/controls/DateFilterControl/components/CustomFrame.tsx +++ b/superset-frontend/src/explore/components/controls/DateFilterControl/components/CustomFrame.tsx @@ -20,7 +20,8 @@ import React from 'react'; import { t } from '@superset-ui/core'; import { Moment } from 'moment'; import { isInteger } from 'lodash'; -import { Col, InputNumber, Row } from 'src/common/components'; +import { Col, Row } from 'src/common/components'; +import { InputNumber } from 'src/components/Input'; import { DatePicker } from 'src/components/DatePicker'; import { Radio } from 'src/components/Radio'; import Select, { propertyComparator } from 'src/components/Select/Select'; diff --git a/superset-frontend/src/explore/components/controls/DndColumnSelectControl/DndColumnSelectPopoverTitle.jsx b/superset-frontend/src/explore/components/controls/DndColumnSelectControl/DndColumnSelectPopoverTitle.jsx index e062357581a57..eecce0b33335b 100644 --- a/superset-frontend/src/explore/components/controls/DndColumnSelectControl/DndColumnSelectPopoverTitle.jsx +++ b/superset-frontend/src/explore/components/controls/DndColumnSelectControl/DndColumnSelectPopoverTitle.jsx @@ -18,7 +18,7 @@ */ import React, { useCallback, useState } from 'react'; import { t } from '@superset-ui/core'; -import { Input } from 'src/common/components'; +import { Input } from 'src/components/Input'; import { Tooltip } from 'src/components/Tooltip'; export const DndColumnSelectPopoverTitle = ({ diff --git a/superset-frontend/src/explore/components/controls/FilterControl/AdhocFilterEditPopoverSimpleTabContent/index.tsx b/superset-frontend/src/explore/components/controls/FilterControl/AdhocFilterEditPopoverSimpleTabContent/index.tsx index ceddf7cbd79fe..e86ed587c8e40 100644 --- a/superset-frontend/src/explore/components/controls/FilterControl/AdhocFilterEditPopoverSimpleTabContent/index.tsx +++ 
b/superset-frontend/src/explore/components/controls/FilterControl/AdhocFilterEditPopoverSimpleTabContent/index.tsx @@ -36,7 +36,7 @@ import AdhocFilter, { EXPRESSION_TYPES, CLAUSES, } from 'src/explore/components/controls/FilterControl/AdhocFilter'; -import { Input } from 'src/common/components'; +import { Input } from 'src/components/Input'; import { propertyComparator } from 'src/components/Select/Select'; import { optionLabel } from 'src/utils/common'; diff --git a/superset-frontend/src/explore/components/controls/HiddenControl.tsx b/superset-frontend/src/explore/components/controls/HiddenControl.tsx index 867f0fe2a0736..10cfa6134b818 100644 --- a/superset-frontend/src/explore/components/controls/HiddenControl.tsx +++ b/superset-frontend/src/explore/components/controls/HiddenControl.tsx @@ -17,7 +17,7 @@ * under the License. */ import React from 'react'; -import { Input } from 'src/common/components'; +import { Input } from 'src/components/Input'; interface HiddenControlsProps { onChange: () => void; diff --git a/superset-frontend/src/explore/components/controls/MetricControl/AdhocMetricEditPopoverTitle.jsx b/superset-frontend/src/explore/components/controls/MetricControl/AdhocMetricEditPopoverTitle.jsx index 4e9e7c011cfe2..cef62505e4d0e 100644 --- a/superset-frontend/src/explore/components/controls/MetricControl/AdhocMetricEditPopoverTitle.jsx +++ b/superset-frontend/src/explore/components/controls/MetricControl/AdhocMetricEditPopoverTitle.jsx @@ -19,7 +19,7 @@ import React from 'react'; import { t } from '@superset-ui/core'; import PropTypes from 'prop-types'; -import { Input } from 'src/common/components'; +import { Input } from 'src/components/Input'; import { Tooltip } from 'src/components/Tooltip'; const propTypes = { diff --git a/superset-frontend/src/explore/components/controls/TextAreaControl.jsx b/superset-frontend/src/explore/components/controls/TextAreaControl.jsx index d0f6503fc556b..e9c02f076e047 100644 --- 
a/superset-frontend/src/explore/components/controls/TextAreaControl.jsx +++ b/superset-frontend/src/explore/components/controls/TextAreaControl.jsx @@ -18,7 +18,7 @@ */ import React from 'react'; import PropTypes from 'prop-types'; -import { TextArea } from 'src/common/components'; +import { TextArea } from 'src/components/Input'; import { t } from '@superset-ui/core'; import Button from 'src/components/Button'; diff --git a/superset-frontend/src/explore/components/controls/TextAreaControl.test.jsx b/superset-frontend/src/explore/components/controls/TextAreaControl.test.jsx index fbbe88c85904d..d9aeccf7fad70 100644 --- a/superset-frontend/src/explore/components/controls/TextAreaControl.test.jsx +++ b/superset-frontend/src/explore/components/controls/TextAreaControl.test.jsx @@ -21,7 +21,7 @@ import React from 'react'; import sinon from 'sinon'; import { shallow } from 'enzyme'; import { TextAreaEditor } from 'src/components/AsyncAceEditor'; -import { TextArea } from 'src/common/components'; +import { TextArea } from 'src/components/Input'; import TextAreaControl from 'src/explore/components/controls/TextAreaControl'; diff --git a/superset-frontend/src/explore/components/controls/TextControl/index.tsx b/superset-frontend/src/explore/components/controls/TextControl/index.tsx index d5dc8e2427868..38877575d9fb9 100644 --- a/superset-frontend/src/explore/components/controls/TextControl/index.tsx +++ b/superset-frontend/src/explore/components/controls/TextControl/index.tsx @@ -21,7 +21,7 @@ import { legacyValidateNumber, legacyValidateInteger } from '@superset-ui/core'; import debounce from 'lodash/debounce'; import { FAST_DEBOUNCE } from 'src/constants'; import ControlHeader from 'src/explore/components/ControlHeader'; -import { Input } from 'src/common/components'; +import { Input } from 'src/components/Input'; type InputValueType = string | number; diff --git a/superset-frontend/src/explore/components/controls/TimeSeriesColumnControl/index.jsx 
b/superset-frontend/src/explore/components/controls/TimeSeriesColumnControl/index.jsx index 6f4585bdaeb89..7a72bd93c41ad 100644 --- a/superset-frontend/src/explore/components/controls/TimeSeriesColumnControl/index.jsx +++ b/superset-frontend/src/explore/components/controls/TimeSeriesColumnControl/index.jsx @@ -18,7 +18,8 @@ */ import React from 'react'; import PropTypes from 'prop-types'; -import { Row, Col, Input } from 'src/common/components'; +import { Row, Col } from 'src/common/components'; +import { Input } from 'src/components/Input'; import Button from 'src/components/Button'; import Popover from 'src/components/Popover'; import { Select } from 'src/components'; diff --git a/superset-frontend/src/explore/components/controls/VizTypeControl/VizTypeGallery.tsx b/superset-frontend/src/explore/components/controls/VizTypeControl/VizTypeGallery.tsx index c817c50c42ae5..bf8b3c8f7b695 100644 --- a/superset-frontend/src/explore/components/controls/VizTypeControl/VizTypeGallery.tsx +++ b/superset-frontend/src/explore/components/controls/VizTypeControl/VizTypeGallery.tsx @@ -34,7 +34,8 @@ import { SupersetTheme, useTheme, } from '@superset-ui/core'; -import { Collapse, Input } from 'src/common/components'; +import { Collapse } from 'src/common/components'; +import { Input } from 'src/components/Input'; import Label from 'src/components/Label'; import { usePluginContext } from 'src/components/DynamicPlugins'; import Icons from 'src/components/Icons'; diff --git a/superset-frontend/src/views/CRUD/alert/components/AlertReportCronScheduler.test.tsx b/superset-frontend/src/views/CRUD/alert/components/AlertReportCronScheduler.test.tsx index a58f36a19d10e..822b129c56de7 100644 --- a/superset-frontend/src/views/CRUD/alert/components/AlertReportCronScheduler.test.tsx +++ b/superset-frontend/src/views/CRUD/alert/components/AlertReportCronScheduler.test.tsx @@ -21,8 +21,7 @@ import React from 'react'; import { ReactWrapper } from 'enzyme'; import { styledMount as mount } from 
'spec/helpers/theming'; import { CronPicker } from 'src/components/CronPicker'; -import { Input } from 'src/common/components'; - +import { Input } from 'src/components/Input'; import { AlertReportCronScheduler } from './AlertReportCronScheduler'; describe('AlertReportCronScheduler', () => { diff --git a/superset-frontend/src/views/CRUD/alert/components/AlertReportCronScheduler.tsx b/superset-frontend/src/views/CRUD/alert/components/AlertReportCronScheduler.tsx index d7e6af521dd69..41beefa4208d9 100644 --- a/superset-frontend/src/views/CRUD/alert/components/AlertReportCronScheduler.tsx +++ b/superset-frontend/src/views/CRUD/alert/components/AlertReportCronScheduler.tsx @@ -19,7 +19,8 @@ import React, { useState, useCallback, useRef, FunctionComponent } from 'react'; import { t, useTheme } from '@superset-ui/core'; -import { Input, AntdInput } from 'src/common/components'; +import { AntdInput } from 'src/common/components'; +import { Input } from 'src/components/Input'; import { Radio } from 'src/components/Radio'; import { CronPicker, CronError } from 'src/components/CronPicker'; import { StyledInputContainer } from 'src/views/CRUD/alert/AlertReportModal'; diff --git a/superset-frontend/src/views/CRUD/chart/ChartCard.tsx b/superset-frontend/src/views/CRUD/chart/ChartCard.tsx index 40eb109974902..dc44edef91a90 100644 --- a/superset-frontend/src/views/CRUD/chart/ChartCard.tsx +++ b/superset-frontend/src/views/CRUD/chart/ChartCard.tsx @@ -25,7 +25,8 @@ import Chart from 'src/types/Chart'; import ListViewCard from 'src/components/ListViewCard'; import Label from 'src/components/Label'; -import { Dropdown, Menu } from 'src/common/components'; +import { Dropdown } from 'src/common/components'; +import { Menu } from 'src/components/Menu'; import FaveStar from 'src/components/FaveStar'; import FacePile from 'src/components/FacePile'; import { handleChartDelete, CardStyles } from '../utils'; diff --git a/superset-frontend/src/views/CRUD/dashboard/DashboardCard.tsx 
b/superset-frontend/src/views/CRUD/dashboard/DashboardCard.tsx index 3bd28667f8985..7c6feb6db24a4 100644 --- a/superset-frontend/src/views/CRUD/dashboard/DashboardCard.tsx +++ b/superset-frontend/src/views/CRUD/dashboard/DashboardCard.tsx @@ -21,7 +21,8 @@ import { Link, useHistory } from 'react-router-dom'; import { t, useTheme } from '@superset-ui/core'; import { handleDashboardDelete, CardStyles } from 'src/views/CRUD/utils'; import { isFeatureEnabled, FeatureFlag } from 'src/featureFlags'; -import { Dropdown, Menu } from 'src/common/components'; +import { Dropdown } from 'src/common/components'; +import { Menu } from 'src/components/Menu'; import ConfirmStatusChange from 'src/components/ConfirmStatusChange'; import ListViewCard from 'src/components/ListViewCard'; import Icons from 'src/components/Icons'; diff --git a/superset-frontend/src/views/CRUD/welcome/SavedQueries.tsx b/superset-frontend/src/views/CRUD/welcome/SavedQueries.tsx index f9bce955d0c7c..e4d76afe7d8ce 100644 --- a/superset-frontend/src/views/CRUD/welcome/SavedQueries.tsx +++ b/superset-frontend/src/views/CRUD/welcome/SavedQueries.tsx @@ -23,7 +23,8 @@ import sql from 'react-syntax-highlighter/dist/cjs/languages/hljs/sql'; import github from 'react-syntax-highlighter/dist/cjs/styles/hljs/github'; import { LoadingCards } from 'src/views/CRUD/welcome/Welcome'; import withToasts from 'src/components/MessageToasts/withToasts'; -import { Dropdown, Menu } from 'src/common/components'; +import { Dropdown } from 'src/common/components'; +import { Menu } from 'src/components/Menu'; import { copyQueryLink, useListViewResource } from 'src/views/CRUD/hooks'; import ListViewCard from 'src/components/ListViewCard'; import DeleteModal from 'src/components/DeleteModal'; diff --git a/superset-frontend/src/views/components/LanguagePicker.test.tsx b/superset-frontend/src/views/components/LanguagePicker.test.tsx index 6fa5fe26eb55b..230c89d18b48c 100644 --- 
a/superset-frontend/src/views/components/LanguagePicker.test.tsx +++ b/superset-frontend/src/views/components/LanguagePicker.test.tsx @@ -19,7 +19,7 @@ import React from 'react'; import { render, screen } from 'spec/helpers/testing-library'; import userEvent from '@testing-library/user-event'; -import { MainNav as Menu } from 'src/common/components'; +import { MainNav as Menu } from 'src/components/Menu'; import LanguagePicker from './LanguagePicker'; const mockedProps = { diff --git a/superset-frontend/src/views/components/LanguagePicker.tsx b/superset-frontend/src/views/components/LanguagePicker.tsx index faba09a87ffa5..a9a1b5614ba77 100644 --- a/superset-frontend/src/views/components/LanguagePicker.tsx +++ b/superset-frontend/src/views/components/LanguagePicker.tsx @@ -17,7 +17,7 @@ * under the License. */ import React from 'react'; -import { MainNav as Menu } from 'src/common/components'; +import { MainNav as Menu } from 'src/components/Menu'; import { styled } from '@superset-ui/core'; import Icons from 'src/components/Icons'; diff --git a/superset-frontend/src/views/components/Menu.tsx b/superset-frontend/src/views/components/Menu.tsx index 313c4b5cac071..3c51beef66f49 100644 --- a/superset-frontend/src/views/components/Menu.tsx +++ b/superset-frontend/src/views/components/Menu.tsx @@ -21,13 +21,8 @@ import { styled, css, useTheme, SupersetTheme } from '@superset-ui/core'; import { debounce } from 'lodash'; import { Global } from '@emotion/react'; import { getUrlParam } from 'src/utils/urlUtils'; -import { - MainNav as DropdownMenu, - MenuMode, - Row, - Col, - Grid, -} from 'src/common/components'; +import { Row, Col, Grid } from 'src/common/components'; +import { MainNav as DropdownMenu, MenuMode } from 'src/components/Menu'; import { Tooltip } from 'src/components/Tooltip'; import { Link } from 'react-router-dom'; import Icons from 'src/components/Icons'; diff --git a/superset-frontend/src/views/components/MenuRight.tsx 
b/superset-frontend/src/views/components/MenuRight.tsx index fe55e832b7338..342c9257ecb09 100644 --- a/superset-frontend/src/views/components/MenuRight.tsx +++ b/superset-frontend/src/views/components/MenuRight.tsx @@ -17,7 +17,7 @@ * under the License. */ import React, { useState } from 'react'; -import { MainNav as Menu } from 'src/common/components'; +import { MainNav as Menu } from 'src/components/Menu'; import { t, styled, css, SupersetTheme } from '@superset-ui/core'; import { Link } from 'react-router-dom'; import Icons from 'src/components/Icons'; diff --git a/superset-frontend/src/views/components/SubMenu.tsx b/superset-frontend/src/views/components/SubMenu.tsx index 917adfab703f2..957a2dc4ccffe 100644 --- a/superset-frontend/src/views/components/SubMenu.tsx +++ b/superset-frontend/src/views/components/SubMenu.tsx @@ -21,7 +21,8 @@ import { Link, useHistory } from 'react-router-dom'; import { styled } from '@superset-ui/core'; import cx from 'classnames'; import { debounce } from 'lodash'; -import { Menu, MenuMode, Row } from 'src/common/components'; +import { Row } from 'src/common/components'; +import { Menu, MenuMode } from 'src/components/Menu'; import Button, { OnClickHandler } from 'src/components/Button'; const StyledHeader = styled.div` From 79633ce673dd1cf62b6a5004be1b5bceeddd7597 Mon Sep 17 00:00:00 2001 From: Smart-Codi Date: Fri, 25 Feb 2022 15:07:46 +0000 Subject: [PATCH 17/37] fix: null schema issue in saved queries (#18849) --- .../src/SqlLab/components/SqlEditorLeftBar/index.tsx | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/superset-frontend/src/SqlLab/components/SqlEditorLeftBar/index.tsx b/superset-frontend/src/SqlLab/components/SqlEditorLeftBar/index.tsx index 4ea89ad477ef1..7bbdfcf6345ec 100644 --- a/superset-frontend/src/SqlLab/components/SqlEditorLeftBar/index.tsx +++ b/superset-frontend/src/SqlLab/components/SqlEditorLeftBar/index.tsx @@ -39,7 +39,7 @@ interface actionsTypes { addTable: (queryEditor: any, 
value: any, schema: any) => void; setDatabases: (arg0: any) => {}; addDangerToast: (msg: string) => void; - queryEditorSetSchema: (schema?: string) => void; + queryEditorSetSchema: (queryEditor: QueryEditor, schema?: string) => void; queryEditorSetSchemaOptions: () => void; queryEditorSetTableOptions: (options: Array) => void; resetState: () => void; @@ -132,6 +132,10 @@ export default function SqlEditorLeftBar({ const shouldShowReset = window.location.search === '?reset=1'; const tableMetaDataHeight = height - 130; // 130 is the height of the selects above + const onSchemaChange = (schema: string) => { + actions.queryEditorSetSchema(queryEditor, schema); + }; + return (
Date: Sat, 26 Feb 2022 07:12:04 +1300 Subject: [PATCH 18/37] fix: bump Helm chart release version (#18751) (#18758) Co-authored-by: wiktor2200 --- superset/views/database/forms.py | 17 ++++++++--- superset/views/database/views.py | 33 --------------------- tests/integration_tests/csv_upload_tests.py | 4 +++ 3 files changed, 17 insertions(+), 37 deletions(-) diff --git a/superset/views/database/forms.py b/superset/views/database/forms.py index 354cf4203e308..4cf594c48705e 100644 --- a/superset/views/database/forms.py +++ b/superset/views/database/forms.py @@ -29,7 +29,7 @@ StringField, ) from wtforms.ext.sqlalchemy.fields import QuerySelectField -from wtforms.validators import DataRequired, Length, NumberRange, Optional +from wtforms.validators import DataRequired, Length, NumberRange, Optional, Regexp from superset import app, db, security_manager from superset.forms import ( @@ -94,7 +94,10 @@ class CsvToDatabaseForm(UploadToDatabaseForm): name = StringField( _("Table Name"), description=_("Name of table to be created from csv data."), - validators=[DataRequired()], + validators=[ + DataRequired(), + Regexp(r"^[^\.]+$", message=_("Table name cannot contain a schema")), + ], widget=BS3TextFieldWidget(), ) csv_file = FileField( @@ -245,7 +248,10 @@ class ExcelToDatabaseForm(UploadToDatabaseForm): name = StringField( _("Table Name"), description=_("Name of table to be created from excel data."), - validators=[DataRequired()], + validators=[ + DataRequired(), + Regexp(r"^[^\.]+$", message=_("Table name cannot contain a schema")), + ], widget=BS3TextFieldWidget(), ) excel_file = FileField( @@ -378,7 +384,10 @@ class ColumnarToDatabaseForm(UploadToDatabaseForm): name = StringField( _("Table Name"), description=_("Name of table to be created from columnar data."), - validators=[DataRequired()], + validators=[ + DataRequired(), + Regexp(r"^[^\.]+$", message=_("Table name cannot contain a schema")), + ], widget=BS3TextFieldWidget(), ) columnar_file = MultipleFileField( 
diff --git a/superset/views/database/views.py b/superset/views/database/views.py index a739a035a1ad0..115d168ed636a 100644 --- a/superset/views/database/views.py +++ b/superset/views/database/views.py @@ -142,17 +142,6 @@ def form_post(self, form: CsvToDatabaseForm) -> Response: flash(message, "danger") return redirect("/csvtodatabaseview/form") - if "." in csv_table.table and csv_table.schema: - message = _( - "You cannot specify a namespace both in the name of the table: " - '"%(csv_table.table)s" and in the schema field: ' - '"%(csv_table.schema)s". Please remove one', - table=csv_table.table, - schema=csv_table.schema, - ) - flash(message, "danger") - return redirect("/csvtodatabaseview/form") - try: df = pd.concat( pd.read_csv( @@ -289,17 +278,6 @@ def form_post(self, form: ExcelToDatabaseForm) -> Response: flash(message, "danger") return redirect("/exceltodatabaseview/form") - if "." in excel_table.table and excel_table.schema: - message = _( - "You cannot specify a namespace both in the name of the table: " - '"%(excel_table.table)s" and in the schema field: ' - '"%(excel_table.schema)s". Please remove one', - table=excel_table.table, - schema=excel_table.schema, - ) - flash(message, "danger") - return redirect("/exceltodatabaseview/form") - uploaded_tmp_file_path = tempfile.NamedTemporaryFile( # pylint: disable=consider-using-with dir=app.config["UPLOAD_FOLDER"], suffix=os.path.splitext(form.excel_file.data.filename)[1].lower(), @@ -459,17 +437,6 @@ def form_post( # pylint: disable=too-many-locals flash(message, "danger") return redirect("/columnartodatabaseview/form") - if "." in columnar_table.table and columnar_table.schema: - message = _( - "You cannot specify a namespace both in the name of the table: " - '"%(columnar_table.table)s" and in the schema field: ' - '"%(columnar_table.schema)s". 
Please remove one', - table=columnar_table.table, - schema=columnar_table.schema, - ) - flash(message, "danger") - return redirect("/columnartodatabaseview/form") - try: chunks = [read(file, **kwargs) for file in files] df = pd.concat(chunks) diff --git a/tests/integration_tests/csv_upload_tests.py b/tests/integration_tests/csv_upload_tests.py index 862a6eadfec60..d5da25c38c115 100644 --- a/tests/integration_tests/csv_upload_tests.py +++ b/tests/integration_tests/csv_upload_tests.py @@ -219,6 +219,10 @@ def test_import_csv_enforced_schema(mock_event_logger): full_table_name = f"admin_database.{CSV_UPLOAD_TABLE_W_SCHEMA}" + # Invalid table name + resp = upload_csv(CSV_FILENAME1, full_table_name) + assert "Table name cannot contain a schema" in resp + # no schema specified, fail upload resp = upload_csv(CSV_FILENAME1, CSV_UPLOAD_TABLE_W_SCHEMA, extra={"schema": None}) assert ( From 147dc5af71e67f667dea6990ab1a3efbca5698d6 Mon Sep 17 00:00:00 2001 From: AAfghahi <48933336+AAfghahi@users.noreply.github.com> Date: Fri, 25 Feb 2022 13:48:15 -0500 Subject: [PATCH 19/37] fix(sql Lab tabs): Empty SQL Lab tabs (#18817) * Empty SQL table message on zero tabs * sql editor no editor tab bug fix * Revert Error message * empty state tab state * added a unit test * addressed reviews * kasia feedback Co-authored-by: Yahya Kayani --- .../TabbedSqlEditors.test.jsx | 7 ++++ .../components/TabbedSqlEditors/index.jsx | 40 +++++++++++++++++++ .../src/SqlLab/reducers/sqlLab.js | 2 +- .../src/assets/images/empty_sql_chart.svg | 22 ++++++++++ 4 files changed, 70 insertions(+), 1 deletion(-) create mode 100644 superset-frontend/src/assets/images/empty_sql_chart.svg diff --git a/superset-frontend/src/SqlLab/components/TabbedSqlEditors/TabbedSqlEditors.test.jsx b/superset-frontend/src/SqlLab/components/TabbedSqlEditors/TabbedSqlEditors.test.jsx index 7acef14cf06c7..552afe37f8fe2 100644 --- a/superset-frontend/src/SqlLab/components/TabbedSqlEditors/TabbedSqlEditors.test.jsx +++ 
b/superset-frontend/src/SqlLab/components/TabbedSqlEditors/TabbedSqlEditors.test.jsx @@ -79,6 +79,7 @@ describe('TabbedSqlEditors', () => { defaultQueryLimit: 1000, maxRow: 100000, }; + const getWrapper = () => shallow() .dive() @@ -227,4 +228,10 @@ describe('TabbedSqlEditors', () => { wrapper.setProps({ offline: true }); expect(wrapper.find(EditableTabs).props().hideAdd).toBe(true); }); + it('should have an empty state when query editors is empty', () => { + wrapper = getWrapper(); + wrapper.setProps({ queryEditors: [] }); + const firstTab = wrapper.find(EditableTabs.TabPane).first(); + expect(firstTab.props()['data-key']).toBe(0); + }); }); diff --git a/superset-frontend/src/SqlLab/components/TabbedSqlEditors/index.jsx b/superset-frontend/src/SqlLab/components/TabbedSqlEditors/index.jsx index cf2867eb8962c..4e486c1c69f4a 100644 --- a/superset-frontend/src/SqlLab/components/TabbedSqlEditors/index.jsx +++ b/superset-frontend/src/SqlLab/components/TabbedSqlEditors/index.jsx @@ -30,6 +30,7 @@ import { areArraysShallowEqual } from 'src/reduxUtils'; import { Tooltip } from 'src/components/Tooltip'; import { detectOS } from 'src/utils/common'; import * as Actions from 'src/SqlLab/actions/sqlLab'; +import { EmptyStateBig } from 'src/components/EmptyState'; import SqlEditor from '../SqlEditor'; import TabStatusIcon from '../TabStatusIcon'; @@ -64,6 +65,10 @@ const TabTitleWrapper = styled.div` align-items: center; `; +const StyledTab = styled.span` + line-height: 24px; +`; + const TabTitle = styled.span` margin-right: ${({ theme }) => theme.gridUnit * 2}px; text-transform: none; @@ -314,6 +319,7 @@ class TabbedSqlEditors extends React.PureComponent { } render() { + const noQueryEditors = this.props.queryEditors?.length === 0; const editors = this.props.queryEditors.map(qe => { let latestQuery; if (qe.latestQueryId) { @@ -401,6 +407,37 @@ class TabbedSqlEditors extends React.PureComponent { ); }); + const emptyTab = ( + + {t('Add a new tab')} + + + + + ); + + const 
emptyTabState = ( + + + + ); + return ( noQueryEditors && this.newQueryEditor()} onEdit={this.handleEdit} + type={noQueryEditors ? 'card' : 'editable-card'} addIcon={ {editors} + {noQueryEditors && emptyTabState} ); } diff --git a/superset-frontend/src/SqlLab/reducers/sqlLab.js b/superset-frontend/src/SqlLab/reducers/sqlLab.js index 68de3ebd66d8c..d5abf3b540b74 100644 --- a/superset-frontend/src/SqlLab/reducers/sqlLab.js +++ b/superset-frontend/src/SqlLab/reducers/sqlLab.js @@ -358,7 +358,7 @@ export default function sqlLabReducer(state = {}, action) { [actions.SET_ACTIVE_QUERY_EDITOR]() { const qeIds = state.queryEditors.map(qe => qe.id); if ( - qeIds.indexOf(action.queryEditor.id) > -1 && + qeIds.indexOf(action.queryEditor?.id) > -1 && state.tabHistory[state.tabHistory.length - 1] !== action.queryEditor.id ) { const tabHistory = state.tabHistory.slice(); diff --git a/superset-frontend/src/assets/images/empty_sql_chart.svg b/superset-frontend/src/assets/images/empty_sql_chart.svg new file mode 100644 index 0000000000000..6ab969575c9db --- /dev/null +++ b/superset-frontend/src/assets/images/empty_sql_chart.svg @@ -0,0 +1,22 @@ + + + + + From 2baceddbf16f087ae9247406f23579a6c6664fb6 Mon Sep 17 00:00:00 2001 From: Ville Brofeldt <33317356+villebro@users.noreply.github.com> Date: Fri, 25 Feb 2022 20:50:55 +0200 Subject: [PATCH 20/37] fix(dataset): handle missing database in migration (#18948) --- .../migrations/versions/b8d3a24d9131_new_dataset_models.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/superset/migrations/versions/b8d3a24d9131_new_dataset_models.py b/superset/migrations/versions/b8d3a24d9131_new_dataset_models.py index e6d4537272ff3..1e139dd6a341d 100644 --- a/superset/migrations/versions/b8d3a24d9131_new_dataset_models.py +++ b/superset/migrations/versions/b8d3a24d9131_new_dataset_models.py @@ -321,8 +321,10 @@ def after_insert(target: SqlaTable) -> None: # pylint: disable=too-many-locals # table names database = ( target.database 
- or session.query(Database).filter_by(id=target.database_id).one() + or session.query(Database).filter_by(id=target.database_id).first() ) + if not database: + return engine = database.get_sqla_engine(schema=target.schema) conditional_quote = engine.dialect.identifier_preparer.quote From 4775d3900f2de6b2b6f16f017f31f266f765434c Mon Sep 17 00:00:00 2001 From: GaryNg Date: Mon, 28 Feb 2022 21:05:43 +0800 Subject: [PATCH 21/37] docs: fix sql templating typos (#18955) --- docs/docs/installation/sql-templating.mdx | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/docs/docs/installation/sql-templating.mdx b/docs/docs/installation/sql-templating.mdx index d96b130147a46..8bb419ca127d8 100644 --- a/docs/docs/installation/sql-templating.mdx +++ b/docs/docs/installation/sql-templating.mdx @@ -119,7 +119,7 @@ In this section, we'll walkthrough the pre-defined Jinja macros in Superset. The `{{ current_username() }}` macro returns the username of the currently logged in user. -If you have caching enabled in your Superset configuration, then by defaul the the `username` value will be used +If you have caching enabled in your Superset configuration, then by default the the `username` value will be used by Superset when calculating the cache key. A cache key is a unique identifer that determines if there's a cache hit in the future and Superset can retrieve cached data. @@ -132,7 +132,7 @@ cache key by adding the following parameter to your Jinja code: **Current User ID** -The `{{ current_user_id()}}` macro returns the user_id of the currently logged in user. +The `{{ current_user_id() }}` macro returns the user_id of the currently logged in user. If you have caching enabled in your Superset configuration, then by defaul the the `user_id` value will be used by Superset when calculating the cache key. 
A cache key is a unique identifer that determines if there's a @@ -197,8 +197,8 @@ You can retrieve the value for a specific filter as a list using `{{ filter_valu This is useful if: -- you want to use a filter component to filter a query where the name of filter component column doesn't match the one in the select statement -- you want to have the ability for filter inside the main query for performance purposes +- You want to use a filter component to filter a query where the name of filter component column doesn't match the one in the select statement +- You want to have the ability for filter inside the main query for performance purposes Here's a concrete example: @@ -218,9 +218,9 @@ returns the operator specified in the Explore UI. This is useful if: -- you want to handle more than the IN operator in your SQL clause -- you want to handle generating custom SQL conditions for a filter -- you want to have the ability to filter inside the main query for speed purposes +- You want to handle more than the IN operator in your SQL clause +- You want to handle generating custom SQL conditions for a filter +- You want to have the ability to filter inside the main query for speed purposes Here's a concrete example: From c56dc8eace6a71b45240d1bb6768d75661052a2e Mon Sep 17 00:00:00 2001 From: Kamil Gabryjelski Date: Mon, 28 Feb 2022 14:07:55 +0100 Subject: [PATCH 22/37] feat(legacy-preset-chart-deckgl): Add ,.1f and ,.2f value formats to deckgl charts (#18945) * Add ,.1f and ,.2f value formats to deckgl charts * Remove duplicated code --- .../src/utils/D3Formatting.ts | 2 ++ .../src/utilities/Shared_DeckGL.jsx | 7 ++++--- .../src/utilities/controls.jsx | 17 ----------------- 3 files changed, 6 insertions(+), 20 deletions(-) diff --git a/superset-frontend/packages/superset-ui-chart-controls/src/utils/D3Formatting.ts b/superset-frontend/packages/superset-ui-chart-controls/src/utils/D3Formatting.ts index 2afac915269ba..02585cc58db90 100644 --- 
a/superset-frontend/packages/superset-ui-chart-controls/src/utils/D3Formatting.ts +++ b/superset-frontend/packages/superset-ui-chart-controls/src/utils/D3Formatting.ts @@ -34,6 +34,8 @@ export const D3_FORMAT_OPTIONS: [string, string][] = [ ['.2%', '.2% (12345.432 => 1234543.20%)'], ['.3%', '.3% (12345.432 => 1234543.200%)'], ['.4r', '.4r (12345.432 => 12350)'], + [',.1f', ',.1f (12345.432 => 12,345.4)'], + [',.2f', ',.2f (12345.432 => 12,345.43)'], [',.3f', ',.3f (12345.432 => 12,345.432)'], ['+,', '+, (12345.432 => +12,345.432)'], ['$,.2f', '$,.2f (12345.432 => $12,345.43)'], diff --git a/superset-frontend/plugins/legacy-preset-chart-deckgl/src/utilities/Shared_DeckGL.jsx b/superset-frontend/plugins/legacy-preset-chart-deckgl/src/utilities/Shared_DeckGL.jsx index d3e3751479a36..b801338f0c285 100644 --- a/superset-frontend/plugins/legacy-preset-chart-deckgl/src/utilities/Shared_DeckGL.jsx +++ b/superset-frontend/plugins/legacy-preset-chart-deckgl/src/utilities/Shared_DeckGL.jsx @@ -21,8 +21,8 @@ import React from 'react'; import { t, validateNonEmpty } from '@superset-ui/core'; -import { sharedControls } from '@superset-ui/chart-controls'; -import { D3_FORMAT_OPTIONS, columnChoices, PRIMARY_COLOR } from './controls'; +import { D3_FORMAT_OPTIONS, sharedControls } from '@superset-ui/chart-controls'; +import { columnChoices, PRIMARY_COLOR } from './controls'; const DEFAULT_VIEWPORT = { longitude: 6.85236157047845, @@ -161,9 +161,10 @@ export const legendFormat = { description: t('Choose the format for legend values'), type: 'SelectControl', clearable: false, - default: D3_FORMAT_OPTIONS[0], + default: D3_FORMAT_OPTIONS[0][0], choices: D3_FORMAT_OPTIONS, renderTrigger: true, + freeForm: true, }, }; diff --git a/superset-frontend/plugins/legacy-preset-chart-deckgl/src/utilities/controls.jsx b/superset-frontend/plugins/legacy-preset-chart-deckgl/src/utilities/controls.jsx index 5281fa772fea9..03092e7316af6 100644 --- 
a/superset-frontend/plugins/legacy-preset-chart-deckgl/src/utilities/controls.jsx +++ b/superset-frontend/plugins/legacy-preset-chart-deckgl/src/utilities/controls.jsx @@ -27,23 +27,6 @@ export function columnChoices(datasource) { return []; } -// input choices & options -export const D3_FORMAT_OPTIONS = [ - ['SMART_NUMBER', 'Adaptative formating'], - ['~g', 'Original value'], - [',d', ',d (12345.432 => 12,345)'], - ['.1s', '.1s (12345.432 => 10k)'], - ['.3s', '.3s (12345.432 => 12.3k)'], - [',.1%', ',.1% (12345.432 => 1,234,543.2%)'], - ['.3%', '.3% (12345.432 => 1234543.200%)'], - ['.4r', '.4r (12345.432 => 12350)'], - [',.3f', ',.3f (12345.432 => 12,345.432)'], - ['+,', '+, (12345.432 => +12,345.432)'], - ['$,.2f', '$,.2f (12345.432 => $12,345.43)'], - ['DURATION', 'Duration in ms (66000 => 1m 6s)'], - ['DURATION_SUB', 'Duration in ms (100.40008 => 100ms 400µs 80ns)'], -]; - export const PRIMARY_COLOR = { r: 0, g: 122, b: 135, a: 1 }; export default { From eafe0cfc6f040670a9b35ebcd27f5c83eabe068e Mon Sep 17 00:00:00 2001 From: Geido <60598000+geido@users.noreply.github.com> Date: Mon, 28 Feb 2022 18:56:13 +0200 Subject: [PATCH 23/37] fix(Explore): Pivot table V2 sort by failure with D&D enabled (#18835) * wip * Add tests and clean up * Clean up * Remove unused import --- .../src/query/buildQueryObject.ts | 12 +++++++++- .../src/query/types/Column.ts | 6 ++++- .../test/query/buildQueryObject.test.ts | 22 +++++++++++++++++++ 3 files changed, 38 insertions(+), 2 deletions(-) diff --git a/superset-frontend/packages/superset-ui-core/src/query/buildQueryObject.ts b/superset-frontend/packages/superset-ui-core/src/query/buildQueryObject.ts index c49bc5ce6aa50..52fa1ffed0c2e 100644 --- a/superset-frontend/packages/superset-ui-core/src/query/buildQueryObject.ts +++ b/superset-frontend/packages/superset-ui-core/src/query/buildQueryObject.ts @@ -21,9 +21,12 @@ import { AdhocFilter, QueryFieldAliases, + QueryFormColumn, QueryFormData, QueryObject, QueryObjectFilterClause, + 
isPhysicalColumn, + isAdhocColumn, } from './types'; import processFilters from './processFilters'; import extractExtras from './extractExtras'; @@ -89,6 +92,12 @@ export default function buildQueryObject( ...extras, ...filterFormData, }); + const normalizeSeriesLimitMetric = (column: QueryFormColumn | undefined) => { + if (isAdhocColumn(column) || isPhysicalColumn(column)) { + return column; + } + return undefined; + }; let queryObject: QueryObject = { // fallback `null` to `undefined` so they won't be sent to the backend @@ -113,13 +122,14 @@ export default function buildQueryObject( : numericRowOffset, series_columns, series_limit, - series_limit_metric, + series_limit_metric: normalizeSeriesLimitMetric(series_limit_metric), timeseries_limit: limit ? Number(limit) : 0, timeseries_limit_metric: timeseries_limit_metric || undefined, order_desc: typeof order_desc === 'undefined' ? true : order_desc, url_params: url_params || undefined, custom_params, }; + // override extra form data used by native and cross filters queryObject = overrideExtraFormData(queryObject, overrides); diff --git a/superset-frontend/packages/superset-ui-core/src/query/types/Column.ts b/superset-frontend/packages/superset-ui-core/src/query/types/Column.ts index 8fcf871647e3c..78e4a301c960d 100644 --- a/superset-frontend/packages/superset-ui-core/src/query/types/Column.ts +++ b/superset-frontend/packages/superset-ui-core/src/query/types/Column.ts @@ -54,7 +54,11 @@ export interface Column { export default {}; export function isPhysicalColumn( - column: AdhocColumn | PhysicalColumn, + column?: AdhocColumn | PhysicalColumn, ): column is PhysicalColumn { return typeof column === 'string'; } + +export function isAdhocColumn(column?: AdhocColumn | PhysicalColumn) { + return (column as AdhocColumn)?.sqlExpression !== undefined; +} diff --git a/superset-frontend/packages/superset-ui-core/test/query/buildQueryObject.test.ts 
b/superset-frontend/packages/superset-ui-core/test/query/buildQueryObject.test.ts index 1d3e19854c24b..b2ee6f579d5c1 100644 --- a/superset-frontend/packages/superset-ui-core/test/query/buildQueryObject.test.ts +++ b/superset-frontend/packages/superset-ui-core/test/query/buildQueryObject.test.ts @@ -152,6 +152,28 @@ describe('buildQueryObject', () => { expect(query.timeseries_limit_metric).toEqual(metric); }); + it('should build series_limit_metric', () => { + const metric = 'country'; + query = buildQueryObject({ + datasource: '5__table', + granularity_sqla: 'ds', + viz_type: 'pivot_table_v2', + series_limit_metric: metric, + }); + expect(query.series_limit_metric).toEqual(metric); + }); + + it('should build series_limit_metric as undefined when empty array', () => { + const metric: any = []; + query = buildQueryObject({ + datasource: '5__table', + granularity_sqla: 'ds', + viz_type: 'pivot_table_v2', + series_limit_metric: metric, + }); + expect(query.series_limit_metric).toEqual(undefined); + }); + it('should handle null and non-numeric row_limit and row_offset', () => { const baseQuery = { datasource: '5__table', From 760dab9abdb2199b50006e01cb1e9cc0743cf966 Mon Sep 17 00:00:00 2001 From: serenajiang Date: Mon, 28 Feb 2022 10:26:24 -0800 Subject: [PATCH 24/37] feat(TimeTableViz): sort by first metric (#18896) --- superset/viz.py | 5 +++++ tests/integration_tests/viz_tests.py | 7 +++++++ 2 files changed, 12 insertions(+) diff --git a/superset/viz.py b/superset/viz.py index 26c77c115a408..3a519bb1f9e83 100644 --- a/superset/viz.py +++ b/superset/viz.py @@ -854,6 +854,11 @@ def query_obj(self) -> QueryObjectDict: raise QueryObjectValidationError( _("When using 'Group By' you are limited to use a single metric") ) + + sort_by = utils.get_first_metric_name(query_obj["metrics"]) + is_asc = not query_obj.get("order_desc") + query_obj["orderby"] = [(sort_by, is_asc)] + return query_obj def get_data(self, df: pd.DataFrame) -> VizData: diff --git 
a/tests/integration_tests/viz_tests.py b/tests/integration_tests/viz_tests.py index b9055afd27fd1..465fdb26ef581 100644 --- a/tests/integration_tests/viz_tests.py +++ b/tests/integration_tests/viz_tests.py @@ -1067,6 +1067,13 @@ def test_query_obj_throws_metrics_and_groupby(self, super_query_obj): with self.assertRaises(Exception): test_viz.query_obj() + def test_query_obj_order_by(self): + test_viz = viz.TimeTableViz( + self.get_datasource_mock(), {"metrics": ["sum__A", "count"], "groupby": []} + ) + query_obj = test_viz.query_obj() + self.assertEqual(query_obj["orderby"], [("sum__A", False)]) + class TestBaseDeckGLViz(SupersetTestCase): def test_get_metrics(self): From 85f0715644865c983b9e1b05e7da7225eb091c5b Mon Sep 17 00:00:00 2001 From: John Bodley <4567245+john-bodley@users.noreply.github.com> Date: Tue, 1 Mar 2022 08:35:55 +1300 Subject: [PATCH 25/37] chore(security): Remove obsolete FAB metric_access permission (#16860) * Update manager.py * Update manager.py * Update manager.py * Update manager.py --- superset/security/manager.py | 1 - 1 file changed, 1 deletion(-) diff --git a/superset/security/manager.py b/superset/security/manager.py index d3cae1f4d01b1..49cfa5de5e880 100644 --- a/superset/security/manager.py +++ b/superset/security/manager.py @@ -210,7 +210,6 @@ class SupersetSecurityManager( # pylint: disable=too-many-public-methods "database_access", "schema_access", "datasource_access", - "metric_access", } ACCESSIBLE_PERMS = {"can_userinfo", "resetmypassword"} From 7e336d1230ca8333afc3e5be7780bc962cbf8478 Mon Sep 17 00:00:00 2001 From: Grace Guo Date: Mon, 28 Feb 2022 15:15:15 -0800 Subject: [PATCH 26/37] fix: add/find owners from report/alert modal (#18971) --- superset-frontend/src/views/CRUD/alert/AlertReportModal.tsx | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/superset-frontend/src/views/CRUD/alert/AlertReportModal.tsx b/superset-frontend/src/views/CRUD/alert/AlertReportModal.tsx index 2934c3cd80ad0..4f40d663bc1af 100644 
--- a/superset-frontend/src/views/CRUD/alert/AlertReportModal.tsx +++ b/superset-frontend/src/views/CRUD/alert/AlertReportModal.tsx @@ -595,7 +595,7 @@ const AlertReportModal: FunctionComponent = ({ page_size: pageSize, }); return SupersetClient.get({ - endpoint: `/api/v1/report/related/owners?q=${query}`, + endpoint: `/api/v1/report/related/created_by?q=${query}`, }).then(response => ({ data: response.json.result.map( (item: { value: number; text: string }) => ({ From c4e3c45b3c24034205a1ceeb5387d63dc666a7fe Mon Sep 17 00:00:00 2001 From: Stephen Liu <750188453@qq.com> Date: Tue, 1 Mar 2022 14:53:53 +0800 Subject: [PATCH 27/37] fix(plugin-chart-echarts): fix customize margin (#18958) --- .../plugin-chart-echarts/src/Timeseries/transformers.ts | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/superset-frontend/plugins/plugin-chart-echarts/src/Timeseries/transformers.ts b/superset-frontend/plugins/plugin-chart-echarts/src/Timeseries/transformers.ts index 19fe600ca31c9..c357b2a40ee9d 100644 --- a/superset-frontend/plugins/plugin-chart-echarts/src/Timeseries/transformers.ts +++ b/superset-frontend/plugins/plugin-chart-echarts/src/Timeseries/transformers.ts @@ -456,18 +456,18 @@ export function getPadding( const yAxisOffset = addYAxisTitleOffset ? TIMESERIES_CONSTANTS.yAxisLabelTopOffset : 0; - const xAxisOffset = addXAxisTitleOffset ? xAxisTitleMargin || 0 : 0; + const xAxisOffset = addXAxisTitleOffset ? Number(xAxisTitleMargin) || 0 : 0; return getChartPadding(showLegend, legendOrientation, margin, { top: yAxisTitlePosition && yAxisTitlePosition === 'Top' - ? TIMESERIES_CONSTANTS.gridOffsetTop + (yAxisTitleMargin || 0) + ? TIMESERIES_CONSTANTS.gridOffsetTop + (Number(yAxisTitleMargin) || 0) : TIMESERIES_CONSTANTS.gridOffsetTop + yAxisOffset, bottom: zoomable ? TIMESERIES_CONSTANTS.gridOffsetBottomZoomable + xAxisOffset : TIMESERIES_CONSTANTS.gridOffsetBottom + xAxisOffset, left: yAxisTitlePosition === 'Left' - ? 
TIMESERIES_CONSTANTS.gridOffsetLeft + (yAxisTitleMargin || 0) + ? TIMESERIES_CONSTANTS.gridOffsetLeft + (Number(yAxisTitleMargin) || 0) : TIMESERIES_CONSTANTS.gridOffsetLeft, right: showLegend && legendOrientation === LegendOrientation.Right From 2072225a8637299c23c337966c9d5aaaeaaa732b Mon Sep 17 00:00:00 2001 From: Kamil Gabryjelski Date: Tue, 1 Mar 2022 11:43:57 +0100 Subject: [PATCH 28/37] fix(native-filters): Default value not shown in native filters (#18963) * fix(native-filters): Default value not shown in native filters * Reuse stringified datamask --- .../dashboard/components/nativeFilters/FilterBar/index.tsx | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/superset-frontend/src/dashboard/components/nativeFilters/FilterBar/index.tsx b/superset-frontend/src/dashboard/components/nativeFilters/FilterBar/index.tsx index 6aef6e3069a7e..cb5e3e80a7ae1 100644 --- a/superset-frontend/src/dashboard/components/nativeFilters/FilterBar/index.tsx +++ b/superset-frontend/src/dashboard/components/nativeFilters/FilterBar/index.tsx @@ -289,6 +289,11 @@ const FilterBar: React.FC = ({ }, [JSON.stringify(filters), JSON.stringify(previousFilters)]); const dataMaskAppliedText = JSON.stringify(dataMaskApplied); + + useEffect(() => { + setDataMaskSelected(() => dataMaskApplied); + }, [dataMaskAppliedText, setDataMaskSelected]); + useEffect(() => { publishDataMask(history, dashboardId, updateKey, dataMaskApplied, tabId); // eslint-disable-next-line react-hooks/exhaustive-deps From 37752cea9b3832b763f8fcb31e8895857cbb1955 Mon Sep 17 00:00:00 2001 From: smileydev <47900232+prosdev0107@users.noreply.github.com> Date: Tue, 1 Mar 2022 09:16:48 -0500 Subject: [PATCH 29/37] fix(external-link): change the external link in creating chart dashboard (#18982) --- superset-frontend/src/addSlice/AddSliceContainer.tsx | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/superset-frontend/src/addSlice/AddSliceContainer.tsx b/superset-frontend/src/addSlice/AddSliceContainer.tsx 
index db8da84beb4dd..ee3bb16820294 100644 --- a/superset-frontend/src/addSlice/AddSliceContainer.tsx +++ b/superset-frontend/src/addSlice/AddSliceContainer.tsx @@ -294,7 +294,7 @@ export default class AddSliceContainer extends React.PureComponent< 'Instructions to add a dataset are available in the Superset tutorial.', )}{' '} From ca93d63bbb72630f90afcd7fc514014cad9b3e52 Mon Sep 17 00:00:00 2001 From: smileydev <47900232+prosdev0107@users.noreply.github.com> Date: Tue, 1 Mar 2022 10:05:11 -0500 Subject: [PATCH 30/37] fix(tooltip-not-dissapeared): make the tooltip disappear when mouse away (#18892) * fix(tooltip-not-dissapeared): make the tooltip disappear when mouse away * fix(tooltip-not-dissapeared): change css props instead of sytles props --- superset-frontend/src/components/Button/index.tsx | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/superset-frontend/src/components/Button/index.tsx b/superset-frontend/src/components/Button/index.tsx index 8851a6f9cd688..63a017836a1f7 100644 --- a/superset-frontend/src/components/Button/index.tsx +++ b/superset-frontend/src/components/Button/index.tsx @@ -206,6 +206,7 @@ export default function Button(props: ButtonProps) { buttonStyle === 'link' ? 'transparent' : backgroundColorDisabled, borderColor: buttonStyle === 'link' ? 'transparent' : borderColorDisabled, + pointerEvents: 'none', }, marginLeft: 0, '& + .superset-button': { @@ -230,7 +231,11 @@ export default function Button(props: ButtonProps) { > {/* this ternary wraps the button in a span so that the tooltip shows up when the button is disabled. */} - {disabled ? {button} : button} + {disabled ? 
( + {button} + ) : ( + button + )} ); } From ec746c2a1066f2fd8d6e809397d8172cc336b833 Mon Sep 17 00:00:00 2001 From: Kamil Gabryjelski Date: Tue, 1 Mar 2022 17:33:09 +0100 Subject: [PATCH 31/37] chore(native-filters): Add unit tests for filter cards (#18967) * chore(native-filters): Add unit tests for filter cards * Fix test --- .../FilterCard/FilterCard.test.tsx | 307 ++++++++++++++++++ .../nativeFilters/FilterCard/ScopeRow.tsx | 11 +- 2 files changed, 312 insertions(+), 6 deletions(-) create mode 100644 superset-frontend/src/dashboard/components/nativeFilters/FilterCard/FilterCard.test.tsx diff --git a/superset-frontend/src/dashboard/components/nativeFilters/FilterCard/FilterCard.test.tsx b/superset-frontend/src/dashboard/components/nativeFilters/FilterCard/FilterCard.test.tsx new file mode 100644 index 0000000000000..5a0663032d3cf --- /dev/null +++ b/superset-frontend/src/dashboard/components/nativeFilters/FilterCard/FilterCard.test.tsx @@ -0,0 +1,307 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +import React from 'react'; +import * as reactRedux from 'react-redux'; +import { Filter, NativeFilterType } from '@superset-ui/core'; +import userEvent from '@testing-library/user-event'; +import { render, screen } from 'spec/helpers/testing-library'; +import { DASHBOARD_ROOT_ID } from 'src/dashboard/util/constants'; +import { SET_DIRECT_PATH } from 'src/dashboard/actions/dashboardState'; +import { FilterCardContent } from './FilterCardContent'; + +const baseInitialState = { + nativeFilters: { + filters: { + 'NATIVE_FILTER-1': { + id: 'NATIVE_FILTER-1', + controlValues: {}, + name: 'Native filter 1', + filterType: 'filter_select', + targets: [ + { + datasetId: 1, + column: { + name: 'gender', + }, + }, + ], + defaultDataMask: {}, + cascadeParentIds: [], + scope: { + rootPath: [DASHBOARD_ROOT_ID], + excluded: [], + }, + type: NativeFilterType.NATIVE_FILTER, + description: '', + }, + 'NATIVE_FILTER-2': { + id: 'NATIVE_FILTER-2', + controlValues: {}, + name: 'Native filter 2', + filterType: 'filter_select', + targets: [ + { + datasetId: 1, + column: { + name: 'gender', + }, + }, + ], + defaultDataMask: {}, + cascadeParentIds: [], + scope: { + rootPath: [DASHBOARD_ROOT_ID], + excluded: [], + }, + type: NativeFilterType.NATIVE_FILTER, + description: '', + }, + }, + }, + charts: { + '1': { + id: 1, + }, + '2': { + id: 2, + }, + '3': { + id: 3, + }, + }, + dashboardLayout: { + past: [], + future: [], + present: { + ROOT_ID: { + children: ['TABS-1'], + id: 'ROOT_ID', + type: 'ROOT', + }, + + 'TABS-1': { + children: ['TAB-1', 'TAB-2'], + id: 'TABS-1', + meta: {}, + parents: ['ROOT_ID'], + type: 'TABS', + }, + 'TAB-1': { + children: [], + id: 'TAB-1', + meta: { + defaultText: 'Tab title', + placeholder: 'Tab title', + text: 'Tab 1', + }, + parents: ['ROOT_ID', 'TABS-1'], + type: 'TAB', + }, + 'TAB-2': { + children: [], + id: 'TAB-2', + meta: { + defaultText: 'Tab title', + placeholder: 'Tab title', + text: 'Tab 2', + }, + parents: ['ROOT_ID', 'TABS-1'], + type: 
'TAB', + }, + 'CHART-1': { + children: [], + id: 'CHART-1', + meta: { + chartId: 1, + sliceName: 'Test chart', + }, + parents: ['ROOT_ID', 'TABS-1', 'TAB-1'], + type: 'CHART', + }, + 'CHART-2': { + children: [], + id: 'CHART-2', + meta: { + chartId: 2, + sliceName: 'Test chart 2', + }, + parents: ['ROOT_ID', 'TABS-1', 'TAB-1'], + type: 'CHART', + }, + 'CHART-3': { + children: [], + id: 'CHART-3', + meta: { + chartId: 3, + sliceName: 'Test chart 3', + }, + parents: ['ROOT_ID', 'TABS-1', 'TAB-1'], + type: 'CHART', + }, + 'CHART-4': { + children: [], + id: 'CHART-4', + meta: { + chartId: 4, + sliceName: 'Test chart 4', + }, + parents: ['ROOT_ID', 'TABS-1', 'TAB-2'], + type: 'CHART', + }, + }, + }, +}; +const baseFilter: Filter = { + id: 'NATIVE_FILTER-1', + controlValues: {}, + name: 'Native filter 1', + filterType: 'filter_select', + targets: [ + { + datasetId: 1, + column: { + name: 'gender', + }, + }, + ], + defaultDataMask: {}, + cascadeParentIds: [], + scope: { + rootPath: [DASHBOARD_ROOT_ID], + excluded: [], + }, + type: NativeFilterType.NATIVE_FILTER, + description: '', +}; + +jest.mock('@superset-ui/core', () => ({ + // @ts-ignore + ...jest.requireActual('@superset-ui/core'), + getChartMetadataRegistry: () => ({ + get: (type: string) => { + if (type === 'filter_select') { + return { name: 'Select filter' }; + } + return undefined; + }, + }), +})); + +// extract text from embedded html tags +// source: https://polvara.me/posts/five-things-you-didnt-know-about-testing-library +const getTextInHTMLTags = + (target: string | RegExp) => (content: string, node: Element) => { + const hasText = (node: Element) => node.textContent === target; + const nodeHasText = hasText(node); + const childrenDontHaveText = Array.from(node.children).every( + child => !hasText(child), + ); + + return nodeHasText && childrenDontHaveText; + }; + +const renderContent = (filter = baseFilter, initialState = baseInitialState) => + render(, { + useRedux: true, + initialState, + }); + 
+describe('Filter Card', () => { + it('Basic', () => { + renderContent(); + expect(screen.getByText('Native filter 1')).toBeVisible(); + expect(screen.getByLabelText('filter-small')).toBeVisible(); + + expect(screen.getByText('Filter type')).toBeVisible(); + expect(screen.getByText('Select filter')).toBeVisible(); + + expect(screen.getByText('Scope')).toBeVisible(); + expect(screen.getByText('All charts')).toBeVisible(); + + expect(screen.queryByText('Dependencies')).not.toBeInTheDocument(); + }); + + describe('Scope', () => { + it('Scope with excluded', () => { + const filter = { + ...baseFilter, + scope: { rootPath: [DASHBOARD_ROOT_ID], excluded: [1, 4] }, + }; + renderContent(filter); + expect(screen.getByText('Scope')).toBeVisible(); + expect(screen.getByText('Test chart 2')).toBeVisible(); + expect( + screen.getByText(getTextInHTMLTags('Test chart 2, Test chart 3')), + ).toBeVisible(); + }); + + it('Scope with top level tab as root', () => { + const filter = { + ...baseFilter, + scope: { rootPath: ['TAB-1', 'TAB-2'], excluded: [1, 2] }, + }; + renderContent(filter); + expect(screen.getByText('Scope')).toBeVisible(); + expect( + screen.getByText(getTextInHTMLTags('Tab 2, Test chart 3')), + ).toBeVisible(); + }); + + it('Empty scope', () => { + const filter = { + ...baseFilter, + scope: { rootPath: [], excluded: [1, 2, 3, 4] }, + }; + renderContent(filter); + expect(screen.getByText('Scope')).toBeVisible(); + expect(screen.getByText('None')).toBeVisible(); + }); + }); + + describe('Dependencies', () => { + it('Has dependency', () => { + const filter = { + ...baseFilter, + cascadeParentIds: ['NATIVE_FILTER-2'], + }; + renderContent(filter); + expect(screen.getByText('Dependent on')).toBeVisible(); + expect(screen.getByText('Native filter 2')).toBeVisible(); + }); + + it('Focus filter on dependency click', () => { + const useDispatchMock = jest.spyOn(reactRedux, 'useDispatch'); + const dummyDispatch = jest.fn(); + useDispatchMock.mockReturnValue(dummyDispatch); + 
+ const filter = { + ...baseFilter, + cascadeParentIds: ['NATIVE_FILTER-2'], + }; + renderContent(filter); + + userEvent.click(screen.getByText('Native filter 2')); + expect(dummyDispatch).toHaveBeenCalledWith({ + type: SET_DIRECT_PATH, + path: ['NATIVE_FILTER-2'], + }); + }); + }); +}); diff --git a/superset-frontend/src/dashboard/components/nativeFilters/FilterCard/ScopeRow.tsx b/superset-frontend/src/dashboard/components/nativeFilters/FilterCard/ScopeRow.tsx index 8f005874d2347..f7ede30692bd4 100644 --- a/superset-frontend/src/dashboard/components/nativeFilters/FilterCard/ScopeRow.tsx +++ b/superset-frontend/src/dashboard/components/nativeFilters/FilterCard/ScopeRow.tsx @@ -47,17 +47,16 @@ export const ScopeRow = React.memo(({ filter }: FilterCardRowProps) => { [elementsTruncated, scope], ); - if (!Array.isArray(scope) || scope.length === 0) { - return null; - } return ( {t('Scope')} - {scope.map((element, index) => ( - {index === 0 ? element : `, ${element}`} - ))} + {scope + ? scope.map((element, index) => ( + {index === 0 ? 
element : `, ${element}`} + )) + : t('None')} {hasHiddenElements > 0 && ( +{elementsTruncated} From 834686a3d901116369b9c8a947ab4024389938c3 Mon Sep 17 00:00:00 2001 From: Ville Brofeldt <33317356+villebro@users.noreply.github.com> Date: Tue, 1 Mar 2022 18:38:29 +0200 Subject: [PATCH 32/37] bump and harmonize urijs and xss (#18922) --- superset-frontend/package-lock.json | 50 ++++++++----------- superset-frontend/package.json | 2 +- .../legacy-preset-chart-deckgl/package.json | 4 +- .../legacy-preset-chart-nvd3/package.json | 2 +- .../plugins/plugin-chart-table/package.json | 2 +- 5 files changed, 26 insertions(+), 34 deletions(-) diff --git a/superset-frontend/package-lock.json b/superset-frontend/package-lock.json index b26b3f72afe56..88267cecb56fd 100644 --- a/superset-frontend/package-lock.json +++ b/superset-frontend/package-lock.json @@ -137,7 +137,7 @@ "rison": "^0.1.1", "scroll-into-view-if-needed": "^2.2.28", "shortid": "^2.2.6", - "urijs": "^1.19.6", + "urijs": "^1.19.8", "use-immer": "^0.6.0", "use-query-params": "^1.1.9", "yargs": "^15.4.1" @@ -45310,14 +45310,6 @@ "node": ">=0.10.0" } }, - "node_modules/nvd3": { - "version": "1.8.6", - "resolved": "https://registry.npmjs.org/nvd3/-/nvd3-1.8.6.tgz", - "integrity": "sha1-LT66dL8zNjtRAevx0JPFmlOuc8Q=", - "peerDependencies": { - "d3": "^3.4.4" - } - }, "node_modules/nvd3-fork": { "version": "2.0.5", "resolved": "https://registry.npmjs.org/nvd3-fork/-/nvd3-fork-2.0.5.tgz", @@ -54630,9 +54622,9 @@ } }, "node_modules/urijs": { - "version": "1.19.7", - "resolved": "https://registry.npmjs.org/urijs/-/urijs-1.19.7.tgz", - "integrity": "sha512-Id+IKjdU0Hx+7Zx717jwLPsPeUqz7rAtuVBRLLs+qn+J2nf9NGITWVCxcijgYxBqe83C7sqsQPs6H1pyz3x9gA==" + "version": "1.19.8", + "resolved": "https://registry.npmjs.org/urijs/-/urijs-1.19.8.tgz", + "integrity": "sha512-iIXHrjomQ0ZCuDRy44wRbyTZVnfVNLVo3Ksz1yxNyE5wV1IDZW2S5Jszy45DTlw/UdsnRT7DyDhIz7Gy+vJumw==" }, "node_modules/urix": { "version": "0.1.0", @@ -56657,9 +56649,9 @@ "dev": true 
}, "node_modules/xss": { - "version": "1.0.8", - "resolved": "https://registry.npmjs.org/xss/-/xss-1.0.8.tgz", - "integrity": "sha512-3MgPdaXV8rfQ/pNn16Eio6VXYPTkqwa0vc7GkiymmY/DqR1SE/7VPAAVZz1GJsJFrllMYO3RHfEaiUGjab6TNw==", + "version": "1.0.10", + "resolved": "https://registry.npmjs.org/xss/-/xss-1.0.10.tgz", + "integrity": "sha512-qmoqrRksmzqSKvgqzN0055UFWY7OKx1/9JWeRswwEVX9fCG5jcYRxa/A2DHcmZX6VJvjzHRQ2STeeVcQkrmLSw==", "dependencies": { "commander": "^2.20.3", "cssfilter": "0.0.10" @@ -59702,8 +59694,8 @@ "prop-types": "^15.6.0", "react-bootstrap-slider": "2.1.5", "underscore": "^1.8.3", - "urijs": "^1.18.10", - "xss": "^1.0.6" + "urijs": "^1.19.8", + "xss": "^1.0.10" }, "peerDependencies": { "@superset-ui/chart-controls": "*", @@ -59764,7 +59756,7 @@ "moment": "^2.20.1", "nvd3-fork": "^2.0.5", "prop-types": "^15.6.2", - "urijs": "^1.18.10" + "urijs": "^1.19.8" }, "peerDependencies": { "@superset-ui/chart-controls": "*", @@ -59820,7 +59812,7 @@ "memoize-one": "^5.1.1", "react-table": "^7.6.3", "regenerator-runtime": "^0.13.7", - "xss": "^1.0.8" + "xss": "^1.0.10" }, "peerDependencies": { "@superset-ui/chart-controls": "*", @@ -76512,8 +76504,8 @@ "prop-types": "^15.6.0", "react-bootstrap-slider": "2.1.5", "underscore": "^1.8.3", - "urijs": "^1.18.10", - "xss": "^1.0.6" + "urijs": "^1.19.8", + "xss": "^1.0.10" }, "dependencies": { "d3-scale": { @@ -76570,7 +76562,7 @@ "moment": "^2.20.1", "nvd3-fork": "^2.0.5", "prop-types": "^15.6.2", - "urijs": "^1.18.10" + "urijs": "^1.19.8" } }, "@superset-ui/plugin-chart-echarts": { @@ -76602,7 +76594,7 @@ "memoize-one": "^5.1.1", "react-table": "^7.6.3", "regenerator-runtime": "^0.13.7", - "xss": "^1.0.8" + "xss": "^1.0.10" }, "dependencies": { "d3-array": { @@ -102794,9 +102786,9 @@ } }, "urijs": { - "version": "1.19.7", - "resolved": "https://registry.npmjs.org/urijs/-/urijs-1.19.7.tgz", - "integrity": "sha512-Id+IKjdU0Hx+7Zx717jwLPsPeUqz7rAtuVBRLLs+qn+J2nf9NGITWVCxcijgYxBqe83C7sqsQPs6H1pyz3x9gA==" + "version": "1.19.8", 
+ "resolved": "https://registry.npmjs.org/urijs/-/urijs-1.19.8.tgz", + "integrity": "sha512-iIXHrjomQ0ZCuDRy44wRbyTZVnfVNLVo3Ksz1yxNyE5wV1IDZW2S5Jszy45DTlw/UdsnRT7DyDhIz7Gy+vJumw==" }, "urix": { "version": "0.1.0", @@ -104339,9 +104331,9 @@ "dev": true }, "xss": { - "version": "1.0.8", - "resolved": "https://registry.npmjs.org/xss/-/xss-1.0.8.tgz", - "integrity": "sha512-3MgPdaXV8rfQ/pNn16Eio6VXYPTkqwa0vc7GkiymmY/DqR1SE/7VPAAVZz1GJsJFrllMYO3RHfEaiUGjab6TNw==", + "version": "1.0.10", + "resolved": "https://registry.npmjs.org/xss/-/xss-1.0.10.tgz", + "integrity": "sha512-qmoqrRksmzqSKvgqzN0055UFWY7OKx1/9JWeRswwEVX9fCG5jcYRxa/A2DHcmZX6VJvjzHRQ2STeeVcQkrmLSw==", "requires": { "commander": "^2.20.3", "cssfilter": "0.0.10" diff --git a/superset-frontend/package.json b/superset-frontend/package.json index a982865eb2fe4..141d9fe4005d7 100644 --- a/superset-frontend/package.json +++ b/superset-frontend/package.json @@ -197,7 +197,7 @@ "rison": "^0.1.1", "scroll-into-view-if-needed": "^2.2.28", "shortid": "^2.2.6", - "urijs": "^1.19.6", + "urijs": "^1.19.8", "use-immer": "^0.6.0", "use-query-params": "^1.1.9", "yargs": "^15.4.1" diff --git a/superset-frontend/plugins/legacy-preset-chart-deckgl/package.json b/superset-frontend/plugins/legacy-preset-chart-deckgl/package.json index 47a53166fc024..b1ecf1d487e97 100644 --- a/superset-frontend/plugins/legacy-preset-chart-deckgl/package.json +++ b/superset-frontend/plugins/legacy-preset-chart-deckgl/package.json @@ -37,8 +37,8 @@ "prop-types": "^15.6.0", "react-bootstrap-slider": "2.1.5", "underscore": "^1.8.3", - "urijs": "^1.18.10", - "xss": "^1.0.6" + "urijs": "^1.19.8", + "xss": "^1.0.10" }, "peerDependencies": { "@superset-ui/chart-controls": "*", diff --git a/superset-frontend/plugins/legacy-preset-chart-nvd3/package.json b/superset-frontend/plugins/legacy-preset-chart-nvd3/package.json index 9436a49540041..07e3152cf0f49 100644 --- a/superset-frontend/plugins/legacy-preset-chart-nvd3/package.json +++ 
b/superset-frontend/plugins/legacy-preset-chart-nvd3/package.json @@ -37,7 +37,7 @@ "moment": "^2.20.1", "nvd3-fork": "^2.0.5", "prop-types": "^15.6.2", - "urijs": "^1.18.10" + "urijs": "^1.19.8" }, "peerDependencies": { "@superset-ui/chart-controls": "*", diff --git a/superset-frontend/plugins/plugin-chart-table/package.json b/superset-frontend/plugins/plugin-chart-table/package.json index 6913d54a137d6..0577c731a6ba5 100644 --- a/superset-frontend/plugins/plugin-chart-table/package.json +++ b/superset-frontend/plugins/plugin-chart-table/package.json @@ -35,7 +35,7 @@ "memoize-one": "^5.1.1", "react-table": "^7.6.3", "regenerator-runtime": "^0.13.7", - "xss": "^1.0.8" + "xss": "^1.0.10" }, "peerDependencies": { "@types/react": "*", From f497c1900e896a45daac66cf35776d9ad343f69e Mon Sep 17 00:00:00 2001 From: Brian Nguyen Date: Tue, 1 Mar 2022 11:59:31 -0500 Subject: [PATCH 33/37] refactor: converted QueryAutoRefresh to functional component (#18179) * converted file to functional component * refactor: convert class component to functional component * refactor: convert class component to functional" * Working on converting the shouldCheckForQueries test to RTL * Working on converting first test to RTL * Working on first test for queryAutoRefresh * Finished Tests, pushing for review * Cleaned up comments and console logs Co-authored-by: Josue Lugaro --- .../QueryAutoRefresh.test.jsx | 40 ++++----- .../components/QueryAutoRefresh/index.jsx | 86 +++++++++---------- 2 files changed, 59 insertions(+), 67 deletions(-) diff --git a/superset-frontend/src/SqlLab/components/QueryAutoRefresh/QueryAutoRefresh.test.jsx b/superset-frontend/src/SqlLab/components/QueryAutoRefresh/QueryAutoRefresh.test.jsx index 06bf187e1185a..93cea6d08d65f 100644 --- a/superset-frontend/src/SqlLab/components/QueryAutoRefresh/QueryAutoRefresh.test.jsx +++ b/superset-frontend/src/SqlLab/components/QueryAutoRefresh/QueryAutoRefresh.test.jsx @@ -17,12 +17,14 @@ * under the License. 
*/ import React from 'react'; -import { shallow } from 'enzyme'; -import sinon from 'sinon'; +import { render } from 'spec/helpers/testing-library'; +import { ThemeProvider, supersetTheme } from '@superset-ui/core'; import thunk from 'redux-thunk'; import configureStore from 'redux-mock-store'; import QueryAutoRefresh from 'src/SqlLab/components/QueryAutoRefresh'; import { initialState, runningQuery } from 'src/SqlLab/fixtures'; +import fetchMock from 'fetch-mock'; +import * as actions from 'src/SqlLab/actions/sqlLab'; describe('QueryAutoRefresh', () => { const middlewares = [thunk]; @@ -38,31 +40,29 @@ describe('QueryAutoRefresh', () => { sqlLab, }; const store = mockStore(state); - const getWrapper = () => - shallow() - .dive() - .dive(); - let wrapper; + const setup = (overrides = {}) => ( + + + + ); + + const mockFetch = fetchMock.get('glob:*/superset/queries/*', {}); it('shouldCheckForQueries', () => { - wrapper = getWrapper(); - expect(wrapper.instance().shouldCheckForQueries()).toBe(true); + render(setup(), { + useRedux: true, + }); + + expect(mockFetch.called()).toBe(true); }); it('setUserOffline', () => { - wrapper = getWrapper(); - const spy = sinon.spy(wrapper.instance().props.actions, 'setUserOffline'); + const spy = jest.spyOn(actions, 'setUserOffline'); - // state not changed - wrapper.setState({ - offline: false, + render(setup(), { + useRedux: true, }); - expect(spy.called).toBe(false); - // state is changed - wrapper.setState({ - offline: true, - }); - expect(spy.callCount).toBe(1); + expect(spy).toHaveBeenCalled(); }); }); diff --git a/superset-frontend/src/SqlLab/components/QueryAutoRefresh/index.jsx b/superset-frontend/src/SqlLab/components/QueryAutoRefresh/index.jsx index b54936b691efe..43f6c5d8a7d6e 100644 --- a/superset-frontend/src/SqlLab/components/QueryAutoRefresh/index.jsx +++ b/superset-frontend/src/SqlLab/components/QueryAutoRefresh/index.jsx @@ -16,7 +16,7 @@ * specific language governing permissions and limitations * under the 
License. */ -import React from 'react'; +import { useState, useEffect } from 'react'; import PropTypes from 'prop-types'; import { bindActionCreators } from 'redux'; import { connect } from 'react-redux'; @@ -28,31 +28,12 @@ const QUERY_UPDATE_BUFFER_MS = 5000; const MAX_QUERY_AGE_TO_POLL = 21600000; const QUERY_TIMEOUT_LIMIT = 10000; -class QueryAutoRefresh extends React.PureComponent { - constructor(props) { - super(props); - this.state = { - offline: props.offline, - }; - } - - UNSAFE_componentWillMount() { - this.startTimer(); - } - - componentDidUpdate(prevProps) { - if (prevProps.offline !== this.state.offline) { - this.props.actions.setUserOffline(this.state.offline); - } - } - - componentWillUnmount() { - this.stopTimer(); - } +function QueryAutoRefresh({ offline, queries, queriesLastUpdate, actions }) { + const [offlineState, setOfflineState] = useState(offline); + let timer = null; - shouldCheckForQueries() { + const shouldCheckForQueries = () => { // if there are started or running queries, this method should return true - const { queries } = this.props; const now = new Date().getTime(); const isQueryRunning = q => ['running', 'started', 'pending', 'fetching'].indexOf(q.state) >= 0; @@ -60,46 +41,57 @@ class QueryAutoRefresh extends React.PureComponent { return Object.values(queries).some( q => isQueryRunning(q) && now - q.startDttm < MAX_QUERY_AGE_TO_POLL, ); - } - - startTimer() { - if (!this.timer) { - this.timer = setInterval(this.stopwatch.bind(this), QUERY_UPDATE_FREQ); - } - } - - stopTimer() { - clearInterval(this.timer); - this.timer = null; - } + }; - stopwatch() { + const stopwatch = () => { // only poll /superset/queries/ if there are started or running queries - if (this.shouldCheckForQueries()) { + if (shouldCheckForQueries()) { SupersetClient.get({ endpoint: `/superset/queries/${ - this.props.queriesLastUpdate - QUERY_UPDATE_BUFFER_MS + queriesLastUpdate - QUERY_UPDATE_BUFFER_MS }`, timeout: QUERY_TIMEOUT_LIMIT, }) .then(({ json }) => { if 
(Object.keys(json).length > 0) { - this.props.actions.refreshQueries(json); + actions.refreshQueries(json); } - this.setState({ offline: false }); + + setOfflineState(false); }) .catch(() => { - this.setState({ offline: true }); + setOfflineState(true); }); } else { - this.setState({ offline: false }); + setOfflineState(false); } - } + }; + + const startTimer = () => { + if (!timer) { + timer = setInterval(stopwatch(), QUERY_UPDATE_FREQ); + } + }; + + const stopTimer = () => { + clearInterval(timer); + timer = null; + }; + + useEffect(() => { + startTimer(); + return () => { + stopTimer(); + }; + }, []); - render() { - return null; - } + useEffect(() => { + actions.setUserOffline(offlineState); + }, [offlineState]); + + return null; } + QueryAutoRefresh.propTypes = { offline: PropTypes.bool.isRequired, queries: PropTypes.object.isRequired, From 563cce1e29dd54714d34dbc4ec196823c395ff2d Mon Sep 17 00:00:00 2001 From: Srini Kadamati Date: Tue, 1 Mar 2022 12:19:33 -0500 Subject: [PATCH 34/37] docs: renamed yugabytedb to yugabyte as per request by Yugabyte team (#18983) Co-authored-by: Srinivasa Kadamati --- docs/docs/databases/yugabyte.mdx | 6 +++--- docs/src/resources/data.js | 2 +- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/docs/docs/databases/yugabyte.mdx b/docs/docs/databases/yugabyte.mdx index 431ba1e2f178c..8a08c0a833a19 100644 --- a/docs/docs/databases/yugabyte.mdx +++ b/docs/docs/databases/yugabyte.mdx @@ -1,13 +1,13 @@ --- -title: Postgres +title: Yugabyte hide_title: true sidebar_position: 38 version: 1 --- -## YugabyteDB +## Yugabyte -[YugabyteDB](https://www.yugabyte.com/) is a distributed SQL database built on top of PostgreSQL. +[Yugabyte](https://www.yugabyte.com/) is a distributed SQL database built on top of PostgreSQL. 
Note that, if you're using docker-compose, the Postgres connector library [psycopg2](https://www.psycopg.org/docs/) diff --git a/docs/src/resources/data.js b/docs/src/resources/data.js index 3c0cf718a4b16..fbf33de2b0d03 100644 --- a/docs/src/resources/data.js +++ b/docs/src/resources/data.js @@ -139,7 +139,7 @@ export const Databases = [ imgName: 'teradata.png' }, { - title: 'YugabyteDB', + title: 'Yugabyte', href: "www.yugabyte.com", imgName: 'yugabyte.png' } From 5fc7adb55b364c2f0f72a7b37f73590038936bf4 Mon Sep 17 00:00:00 2001 From: Craig Rueda Date: Tue, 1 Mar 2022 09:47:13 -0800 Subject: [PATCH 35/37] fix(packages): Fixing users packages (#18973) * Fixing users packages * Linting me() fn * Fixing tests / moving packages * Fixing imports --- superset/initialization/__init__.py | 2 +- superset/views/users/__init__.py | 17 +++++++++++++++++ superset/{ => views}/users/api.py | 2 +- superset/{ => views}/users/schemas.py | 0 tests/integration_tests/security_tests.py | 2 +- tests/integration_tests/users/__init__.py | 17 +++++++++++++++++ 6 files changed, 37 insertions(+), 3 deletions(-) create mode 100644 superset/views/users/__init__.py rename superset/{ => views}/users/api.py (98%) rename superset/{ => views}/users/schemas.py (100%) create mode 100644 tests/integration_tests/users/__init__.py diff --git a/superset/initialization/__init__.py b/superset/initialization/__init__.py index 8c0e0a3b44478..59e204d6d65e8 100644 --- a/superset/initialization/__init__.py +++ b/superset/initialization/__init__.py @@ -50,7 +50,6 @@ ) from superset.security import SupersetSecurityManager from superset.typing import FlaskResponse -from superset.users.api import CurrentUserRestApi from superset.utils.core import pessimistic_connection_handling from superset.utils.log import DBEventLogger, get_event_logger_from_cfg_value @@ -195,6 +194,7 @@ def init_views(self) -> None: TabStateView, ) from superset.views.tags import TagView + from superset.views.users.api import CurrentUserRestApi 
# # Setup API views diff --git a/superset/views/users/__init__.py b/superset/views/users/__init__.py new file mode 100644 index 0000000000000..fd9417fe5c1e9 --- /dev/null +++ b/superset/views/users/__init__.py @@ -0,0 +1,17 @@ +# -*- coding: utf-8 -*- +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
diff --git a/superset/users/api.py b/superset/views/users/api.py similarity index 98% rename from superset/users/api.py rename to superset/views/users/api.py index 7d52056a8f070..524a382b0679d 100644 --- a/superset/users/api.py +++ b/superset/views/users/api.py @@ -31,7 +31,7 @@ class CurrentUserRestApi(BaseApi): @expose("/", methods=["GET"]) @safe - def me(self) -> Response: + def get_me(self) -> Response: """Get the user object corresponding to the agent making the request --- get: diff --git a/superset/users/schemas.py b/superset/views/users/schemas.py similarity index 100% rename from superset/users/schemas.py rename to superset/views/users/schemas.py diff --git a/tests/integration_tests/security_tests.py b/tests/integration_tests/security_tests.py index b311612fcd2b4..2ebced8069a3d 100644 --- a/tests/integration_tests/security_tests.py +++ b/tests/integration_tests/security_tests.py @@ -905,7 +905,7 @@ def test_views_are_secured(self): ["LocaleView", "index"], ["AuthDBView", "login"], ["AuthDBView", "logout"], - ["CurrentUserRestApi", "me"], + ["CurrentUserRestApi", "get_me"], ["Dashboard", "embedded"], ["R", "index"], ["Superset", "log"], diff --git a/tests/integration_tests/users/__init__.py b/tests/integration_tests/users/__init__.py new file mode 100644 index 0000000000000..fd9417fe5c1e9 --- /dev/null +++ b/tests/integration_tests/users/__init__.py @@ -0,0 +1,17 @@ +# -*- coding: utf-8 -*- +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. From 150fd0dfd395e7e55c4e88957be04daf7be16666 Mon Sep 17 00:00:00 2001 From: AAfghahi <48933336+AAfghahi@users.noreply.github.com> Date: Tue, 1 Mar 2022 12:47:25 -0500 Subject: [PATCH 36/37] added datasource legacy editor flag (#18970) --- superset/config.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/superset/config.py b/superset/config.py index 889cdbe7ebaf7..bad83615ce3ed 100644 --- a/superset/config.py +++ b/superset/config.py @@ -362,7 +362,7 @@ def _try_json_readsha(filepath: str, length: int) -> Optional[str]: # With Superset 2.0, we are updating the default so that the legacy datasource # editor no longer shows. Currently this is set to false so that the editor # option does show, but we will be depreciating it. - "DISABLE_LEGACY_DATASOURCE_EDITOR": False, + "DISABLE_LEGACY_DATASOURCE_EDITOR": True, # For some security concerns, you may need to enforce CSRF protection on # all query request to explore_json endpoint. 
In Superset, we use # `flask-csrf `_ add csrf protection From bd63a1bd98c1faf152205b3b862119a1c59b2f05 Mon Sep 17 00:00:00 2001 From: Ville Brofeldt <33317356+villebro@users.noreply.github.com> Date: Wed, 2 Mar 2022 07:44:36 +0200 Subject: [PATCH 37/37] fix(chart): deprecate persisting url_params (#18960) * fix(chart): deprecate peristing url_params * remove duplicated backend logic * use omitBy * simplify omit --- UPDATING.md | 2 ++ .../src/explore/components/SaveModal.test.jsx | 12 +++---- .../src/explore/components/SaveModal.tsx | 9 ++++-- .../src/explore/exploreUtils/formData.test.ts | 28 ++++++++++++++++ .../src/explore/exploreUtils/formData.ts | 18 +++++++---- superset/charts/commands/export.py | 2 +- superset/views/core.py | 14 ++++++-- tests/integration_tests/charts/api_tests.py | 2 +- .../charts/commands_tests.py | 32 ++++++++++++++++++- 9 files changed, 99 insertions(+), 20 deletions(-) create mode 100644 superset-frontend/src/explore/exploreUtils/formData.test.ts diff --git a/UPDATING.md b/UPDATING.md index 667a47ca991f0..e00532fb4eddc 100644 --- a/UPDATING.md +++ b/UPDATING.md @@ -42,6 +42,8 @@ assists people when migrating to a new version. ### Deprecations +- [18960](https://github.com/apache/superset/pull/18960): Persisting URL params in chart metadata is no longer supported. To set a default value for URL params in Jinja code, use the optional second argument: `url_param("my-param", "my-default-value")`. + ### Other - [17589](https://github.com/apache/incubator-superset/pull/17589): It is now possible to limit access to users' recent activity data by setting the `ENABLE_BROAD_ACTIVITY_ACCESS` config flag to false, or customizing the `raise_for_user_activity_access` method in the security manager. 
diff --git a/superset-frontend/src/explore/components/SaveModal.test.jsx b/superset-frontend/src/explore/components/SaveModal.test.jsx index 8d431da973c83..b25efdb773a8b 100644 --- a/superset-frontend/src/explore/components/SaveModal.test.jsx +++ b/superset-frontend/src/explore/components/SaveModal.test.jsx @@ -64,7 +64,7 @@ describe('SaveModal', () => { } return arg; }), - form_data: { datasource: '107__table' }, + form_data: { datasource: '107__table', url_params: { foo: 'bar' } }, }; const mockEvent = { target: { @@ -168,11 +168,11 @@ describe('SaveModal', () => { defaultProps.actions.saveSlice.restore(); }); - it('should save slice', () => { + it('should save slice without url_params in form_data', () => { const wrapper = getWrapper(); wrapper.instance().saveOrOverwrite(true); const { args } = defaultProps.actions.saveSlice.getCall(0); - expect(args[0]).toEqual(defaultProps.form_data); + expect(args[0]).toEqual({ datasource: '107__table' }); }); it('existing dashboard', () => { @@ -219,7 +219,7 @@ describe('SaveModal', () => { defaultProps.actions.saveSlice().then(() => { expect(window.location.assign.callCount).toEqual(1); expect(window.location.assign.getCall(0).args[0]).toEqual( - 'http://localhost/mock_dashboard/', + 'http://localhost/mock_dashboard/?foo=bar', ); done(); }); @@ -235,7 +235,7 @@ describe('SaveModal', () => { defaultProps.actions.saveSlice().then(() => { expect(window.location.assign.callCount).toEqual(1); expect(window.location.assign.getCall(0).args[0]).toEqual( - '/mock_slice/', + '/mock_slice/?foo=bar', ); done(); }); @@ -248,7 +248,7 @@ describe('SaveModal', () => { defaultProps.actions.saveSlice().then(() => { expect(window.location.assign.callCount).toEqual(1); expect(window.location.assign.getCall(0).args[0]).toEqual( - '/mock_slice/', + '/mock_slice/?foo=bar', ); done(); }); diff --git a/superset-frontend/src/explore/components/SaveModal.tsx b/superset-frontend/src/explore/components/SaveModal.tsx index 292378290e98c..b5c47145e0c97 
100644 --- a/superset-frontend/src/explore/components/SaveModal.tsx +++ b/superset-frontend/src/explore/components/SaveModal.tsx @@ -138,9 +138,10 @@ class SaveModal extends React.Component { sliceParams.slice_name = this.state.newSliceName; sliceParams.save_to_dashboard_id = this.state.saveToDashboardId; sliceParams.new_dashboard_name = this.state.newDashboardName; + const { url_params, ...formData } = this.props.form_data || {}; this.props.actions - .saveSlice(this.props.form_data, sliceParams) + .saveSlice(formData, sliceParams) .then((data: JsonObject) => { if (data.dashboard_id === null) { sessionStorage.removeItem(SK_DASHBOARD_ID); @@ -148,7 +149,11 @@ class SaveModal extends React.Component { sessionStorage.setItem(SK_DASHBOARD_ID, data.dashboard_id); } // Go to new slice url or dashboard url - const url = gotodash ? data.dashboard_url : data.slice.slice_url; + let url = gotodash ? data.dashboard_url : data.slice.slice_url; + if (url_params) { + const prefix = url.includes('?') ? '&' : '?'; + url = `${url}${prefix}${new URLSearchParams(url_params).toString()}`; + } window.location.assign(url); }); this.props.onHide(); diff --git a/superset-frontend/src/explore/exploreUtils/formData.test.ts b/superset-frontend/src/explore/exploreUtils/formData.test.ts new file mode 100644 index 0000000000000..f14455b51bd8e --- /dev/null +++ b/superset-frontend/src/explore/exploreUtils/formData.test.ts @@ -0,0 +1,28 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +import { sanitizeFormData } from './formData'; + +test('sanitizeFormData removes temporary control values', () => { + expect( + sanitizeFormData({ + url_params: { foo: 'bar' }, + metrics: ['foo', 'bar'], + }), + ).toEqual({ metrics: ['foo', 'bar'] }); +}); diff --git a/superset-frontend/src/explore/exploreUtils/formData.ts b/superset-frontend/src/explore/exploreUtils/formData.ts index 38637c1019623..9987b5d8cfa76 100644 --- a/superset-frontend/src/explore/exploreUtils/formData.ts +++ b/superset-frontend/src/explore/exploreUtils/formData.ts @@ -16,6 +16,7 @@ * specific language governing permissions and limitations * under the License. 
*/ +import { omit } from 'lodash'; import { SupersetClient, JsonObject } from '@superset-ui/core'; type Payload = { @@ -24,6 +25,11 @@ type Payload = { chart_id?: number; }; +const TEMPORARY_CONTROLS = ['url_params']; + +export const sanitizeFormData = (formData: JsonObject): JsonObject => + omit(formData, TEMPORARY_CONTROLS); + const assembleEndpoint = (key?: string, tabId?: string) => { let endpoint = 'api/v1/explore/form_data'; if (key) { @@ -37,12 +43,12 @@ const assembleEndpoint = (key?: string, tabId?: string) => { const assemblePayload = ( datasetId: number, - form_data: JsonObject, + formData: JsonObject, chartId?: number, ) => { const payload: Payload = { dataset_id: datasetId, - form_data: JSON.stringify(form_data), + form_data: JSON.stringify(sanitizeFormData(formData)), }; if (chartId) { payload.chart_id = chartId; @@ -52,23 +58,23 @@ const assemblePayload = ( export const postFormData = ( datasetId: number, - form_data: JsonObject, + formData: JsonObject, chartId?: number, tabId?: string, ): Promise => SupersetClient.post({ endpoint: assembleEndpoint(undefined, tabId), - jsonPayload: assemblePayload(datasetId, form_data, chartId), + jsonPayload: assemblePayload(datasetId, formData, chartId), }).then(r => r.json.key); export const putFormData = ( datasetId: number, key: string, - form_data: JsonObject, + formData: JsonObject, chartId?: number, tabId?: string, ): Promise => SupersetClient.put({ endpoint: assembleEndpoint(key, tabId), - jsonPayload: assemblePayload(datasetId, form_data, chartId), + jsonPayload: assemblePayload(datasetId, formData, chartId), }).then(r => r.json.message); diff --git a/superset/charts/commands/export.py b/superset/charts/commands/export.py index 960fd87762b6d..35f70a82eab65 100644 --- a/superset/charts/commands/export.py +++ b/superset/charts/commands/export.py @@ -34,7 +34,7 @@ # keys present in the standard export that are not needed -REMOVE_KEYS = ["datasource_type", "datasource_name", "query_context"] +REMOVE_KEYS = 
["datasource_type", "datasource_name", "query_context", "url_params"] class ExportChartsCommand(ExportModelsCommand): diff --git a/superset/views/core.py b/superset/views/core.py index a51cec56216fe..b4b8365a58200 100755 --- a/superset/views/core.py +++ b/superset/views/core.py @@ -742,7 +742,7 @@ def explore( form_data_key = request.args.get("form_data_key") if form_data_key: - parameters = CommandParameters(actor=g.user, key=form_data_key,) + parameters = CommandParameters(actor=g.user, key=form_data_key) value = GetFormDataCommand(parameters).run() initial_form_data = json.loads(value) if value else {} @@ -751,10 +751,18 @@ def explore( dataset_id = request.args.get("dataset_id") if slice_id: initial_form_data["slice_id"] = slice_id - flash(_("Form data not found in cache, reverting to chart metadata.")) + if form_data_key: + flash( + _("Form data not found in cache, reverting to chart metadata.") + ) elif dataset_id: initial_form_data["datasource"] = f"{dataset_id}__table" - flash(_("Form data not found in cache, reverting to dataset metadata.")) + if form_data_key: + flash( + _( + "Form data not found in cache, reverting to dataset metadata." 
+ ) + ) form_data, slc = get_form_data( use_slice_data=True, initial_form_data=initial_form_data diff --git a/tests/integration_tests/charts/api_tests.py b/tests/integration_tests/charts/api_tests.py index c688044eb51aa..65ed10fe1376f 100644 --- a/tests/integration_tests/charts/api_tests.py +++ b/tests/integration_tests/charts/api_tests.py @@ -84,7 +84,7 @@ def create_charts(self): charts = [] admin = self.get_user("admin") for cx in range(CHARTS_FIXTURE_COUNT - 1): - charts.append(self.insert_chart(f"name{cx}", [admin.id], 1,)) + charts.append(self.insert_chart(f"name{cx}", [admin.id], 1)) fav_charts = [] for cx in range(round(CHARTS_FIXTURE_COUNT / 2)): fav_star = FavStar( diff --git a/tests/integration_tests/charts/commands_tests.py b/tests/integration_tests/charts/commands_tests.py index 4e8ffa1af7706..bf4fe4fe79bd4 100644 --- a/tests/integration_tests/charts/commands_tests.py +++ b/tests/integration_tests/charts/commands_tests.py @@ -15,7 +15,6 @@ # specific language governing permissions and limitations # under the License. 
import json -from datetime import datetime from unittest.mock import patch import pytest @@ -23,6 +22,7 @@ from flask import g from superset import db, security_manager +from superset.charts.commands.create import CreateChartCommand from superset.charts.commands.exceptions import ChartNotFoundError from superset.charts.commands.export import ExportChartsCommand from superset.charts.commands.importers.v1 import ImportChartsCommand @@ -329,6 +329,36 @@ def test_import_v1_chart_validation(self): } +class TestChartsCreateCommand(SupersetTestCase): + @patch("superset.views.base.g") + @patch("superset.security.manager.g") + @pytest.mark.usefixtures("load_energy_table_with_slice") + def test_create_v1_response(self, mock_sm_g, mock_g): + """Test that the create chart command creates a chart""" + actor = security_manager.find_user(username="admin") + mock_g.user = mock_sm_g.user = actor + chart_data = { + "slice_name": "new chart", + "description": "new description", + "owners": [actor.id], + "viz_type": "new_viz_type", + "params": json.dumps({"viz_type": "new_viz_type"}), + "cache_timeout": 1000, + "datasource_id": 1, + "datasource_type": "table", + } + command = CreateChartCommand(actor, chart_data) + chart = command.run() + chart = db.session.query(Slice).get(chart.id) + assert chart.viz_type == "new_viz_type" + json_params = json.loads(chart.params) + assert json_params == {"viz_type": "new_viz_type"} + assert chart.slice_name == "new chart" + assert chart.owners == [actor] + db.session.delete(chart) + db.session.commit() + + class TestChartsUpdateCommand(SupersetTestCase): @patch("superset.views.base.g") @patch("superset.security.manager.g")