Skip to content

Commit

Permalink
refactor: migrate table chart to new API
Browse files Browse the repository at this point in the history
  • Loading branch information
ktmud committed Nov 27, 2020
1 parent 3578410 commit a1e87a8
Show file tree
Hide file tree
Showing 11 changed files with 60 additions and 71 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -36,7 +36,7 @@ describe('Dashboard form data', () => {
});
});

it('should apply url params and queryFields to slice requests', () => {
it('should apply url params to slice requests', () => {
const aliases = getChartAliases(dashboard.slices);
// wait and verify one-by-one
cy.wait(aliases).then(requests => {
Expand All @@ -48,7 +48,6 @@ describe('Dashboard form data', () => {
if (isLegacyResponse(responseBody)) {
const requestFormData = xhr.request.body;
const requestParams = JSON.parse(requestFormData.get('form_data'));
expect(requestParams).to.have.property('queryFields');
expect(requestParams.url_params).deep.eq(urlParams);
} else {
xhr.request.body.queries.forEach(query => {
Expand Down
12 changes: 0 additions & 12 deletions superset-frontend/spec/javascripts/explore/controlUtils_spec.jsx
Original file line number Diff line number Diff line change
Expand Up @@ -198,18 +198,6 @@ describe('controlUtils', () => {
});
});

// Verifies that getFormDataFromControls copies each control's `queryField`
// into a `formData.queryFields` map (control name -> query field name),
// e.g. the table chart's `metric` control maps to the `metrics` query field.
// NOTE(review): this block appears in the deletions column of the diff —
// the commit removes queryFields from formData entirely.
describe('queryFields', () => {
it('in formData', () => {
const controlsState = getAllControlsState('table', 'table', {}, {});
const formData = getFormDataFromControls(controlsState);
expect(formData.queryFields).toEqual({
all_columns: 'columns',
metric: 'metrics',
metrics: 'metrics',
});
});
});

describe('findControlItem', () => {
it('find control as a string', () => {
const controlItem = findControlItem(
Expand Down
2 changes: 0 additions & 2 deletions superset-frontend/spec/javascripts/explore/store_spec.jsx
Original file line number Diff line number Diff line change
Expand Up @@ -72,12 +72,10 @@ describe('store', () => {
const inputFormData = {
datasource: '11_table',
viz_type: 'table',
queryFields: staleQueryFields,
this_should_no_be_here: true,
};
const outputFormData = applyDefaultFormData(inputFormData);
expect(outputFormData.this_should_no_be_here).toBe(undefined);
expect(outputFormData.queryFields).not.toBe(staleQueryFields);
});
});
});
5 changes: 1 addition & 4 deletions superset-frontend/src/explore/controlUtils.js
Original file line number Diff line number Diff line change
Expand Up @@ -22,13 +22,10 @@ import { expandControlConfig } from '@superset-ui/chart-controls';
import * as SECTIONS from './controlPanels/sections';

/**
 * Collect the `value` of every control in `controlsState` into a plain
 * form data object keyed by control name.
 *
 * NOTE(review): the rendered diff span contained both the pre- and
 * post-refactor lines (two `const formData` declarations and leftover
 * `queryFields` bookkeeping), which is a SyntaxError; this resolves to the
 * post-refactor behavior where queryFields are no longer stored in formData.
 *
 * @param {Object} controlsState - map of control name -> control state,
 *   where each control state carries a `value` property.
 * @returns {Object} form data: control name -> control value.
 */
export function getFormDataFromControls(controlsState) {
  const formData = {};
  Object.keys(controlsState).forEach(controlName => {
    const control = controlsState[controlName];
    formData[controlName] = control.value;
  });
  return formData;
}
Expand Down
3 changes: 0 additions & 3 deletions superset-frontend/src/explore/store.js
Original file line number Diff line number Diff line change
Expand Up @@ -76,9 +76,6 @@ export function applyDefaultFormData(inputFormData) {
formData[controlName] = inputFormData[controlName];
}
});

// always use dynamically generated queryFields
formData.queryFields = controlFormData.queryFields;
return formData;
}

Expand Down
2 changes: 2 additions & 0 deletions superset/common/query_context.py
Original file line number Diff line number Diff line change
Expand Up @@ -162,6 +162,8 @@ def get_single_payload(self, query_obj: QueryObject) -> Dict[str, Any]:
df = payload["df"]
status = payload["status"]
if status != utils.QueryStatus.FAILED:
payload["columns"] = list(df.columns)
payload["column_types"] = utils.serialize_dtypes(df.dtypes)
payload["data"] = self.get_data(df)
del payload["df"]

Expand Down
6 changes: 3 additions & 3 deletions superset/connectors/sqla/models.py
Original file line number Diff line number Diff line change
Expand Up @@ -189,7 +189,7 @@ def is_numeric(self) -> bool:
"""
db_engine_spec = self.table.database.db_engine_spec
return db_engine_spec.is_db_column_type_match(
self.type, utils.DbColumnType.NUMERIC
self.type, utils.GenericColumnType.NUMERIC
)

@property
Expand All @@ -199,7 +199,7 @@ def is_string(self) -> bool:
"""
db_engine_spec = self.table.database.db_engine_spec
return db_engine_spec.is_db_column_type_match(
self.type, utils.DbColumnType.STRING
self.type, utils.GenericColumnType.STRING
)

@property
Expand All @@ -214,7 +214,7 @@ def is_temporal(self) -> bool:
return self.is_dttm
db_engine_spec = self.table.database.db_engine_spec
return db_engine_spec.is_db_column_type_match(
self.type, utils.DbColumnType.TEMPORAL
self.type, utils.GenericColumnType.TEMPORAL
)

def get_sqla_col(self, label: Optional[str] = None) -> Column:
Expand Down
32 changes: 13 additions & 19 deletions superset/db_engine_specs/base.py
Original file line number Diff line number Diff line change
Expand Up @@ -157,34 +157,28 @@ class BaseEngineSpec: # pylint: disable=too-many-public-methods
max_column_name_length = 0
try_remove_schema_from_table_name = True # pylint: disable=invalid-name

# default matching patterns for identifying column types
db_column_types: Dict[utils.DbColumnType, Tuple[Pattern[Any], ...]] = {
utils.DbColumnType.NUMERIC: (
# default matching patterns to convert database specific column types to
# more generic types
db_column_types: Dict[utils.GenericColumnType, Tuple[Pattern[Any], ...]] = {
utils.GenericColumnType.NUMERIC: (
re.compile(r"BIT", re.IGNORECASE),
re.compile(r".*DOUBLE.*", re.IGNORECASE),
re.compile(r".*FLOAT.*", re.IGNORECASE),
re.compile(r".*INT.*", re.IGNORECASE),
re.compile(r".*NUMBER.*", re.IGNORECASE),
re.compile(
r".*(DOUBLE|FLOAT|INT|NUMBER|REAL|NUMERIC|DECIMAL|MONEY).*",
re.IGNORECASE,
),
re.compile(r".*LONG$", re.IGNORECASE),
re.compile(r".*REAL.*", re.IGNORECASE),
re.compile(r".*NUMERIC.*", re.IGNORECASE),
re.compile(r".*DECIMAL.*", re.IGNORECASE),
re.compile(r".*MONEY.*", re.IGNORECASE),
),
utils.DbColumnType.STRING: (
re.compile(r".*CHAR.*", re.IGNORECASE),
re.compile(r".*STRING.*", re.IGNORECASE),
re.compile(r".*TEXT.*", re.IGNORECASE),
utils.GenericColumnType.STRING: (
re.compile(r".*(CHAR|STRING|TEXT).*", re.IGNORECASE),
),
utils.DbColumnType.TEMPORAL: (
re.compile(r".*DATE.*", re.IGNORECASE),
re.compile(r".*TIME.*", re.IGNORECASE),
utils.GenericColumnType.TEMPORAL: (
re.compile(r".*(DATE|TIME).*", re.IGNORECASE),
),
}

@classmethod
def is_db_column_type_match(
cls, db_column_type: Optional[str], target_column_type: utils.DbColumnType
cls, db_column_type: Optional[str], target_column_type: utils.GenericColumnType
) -> bool:
"""
Check if a column type satisfies a pattern in a collection of regexes found in
Expand Down
4 changes: 2 additions & 2 deletions superset/result_set.py
Original file line number Diff line number Diff line change
Expand Up @@ -123,7 +123,7 @@ def __init__( # pylint: disable=too-many-locals,too-many-branches
if pa.types.is_nested(pa_data[i].type):
# TODO: revisit nested column serialization once nested types
# are added as a natively supported column type in Superset
# (superset.utils.core.DbColumnType).
# (superset.utils.core.GenericColumnType).
stringified_arr = stringify_values(array[column])
pa_data[i] = pa.array(stringified_arr.tolist())

Expand Down Expand Up @@ -182,7 +182,7 @@ def first_nonempty(items: List[Any]) -> Any:

def is_temporal(self, db_type_str: Optional[str]) -> bool:
    """Return True if ``db_type_str`` matches one of the engine spec's
    TEMPORAL column-type patterns (DATE/TIME style types).

    NOTE(review): the rendered diff kept both the old ``DbColumnType``
    and new ``GenericColumnType`` argument lines in the call; only the
    renamed enum argument is kept here.
    """
    return self.db_engine_spec.is_db_column_type_match(
        db_type_str, utils.GenericColumnType.TEMPORAL
    )

def data_type(self, col_name: str, pa_dtype: pa.DataType) -> Optional[str]:
Expand Down
20 changes: 17 additions & 3 deletions superset/utils/core.py
Original file line number Diff line number Diff line change
Expand Up @@ -37,7 +37,7 @@
from email.mime.multipart import MIMEMultipart
from email.mime.text import MIMEText
from email.utils import formatdate
from enum import Enum
from enum import Enum, IntEnum
from time import struct_time
from timeit import default_timer
from types import TracebackType
Expand Down Expand Up @@ -1424,6 +1424,19 @@ def get_column_names_from_metrics(metrics: List[Metric]) -> List[str]:
columns.append(column_name)
return columns

def serialize_dtypes(dtypes: List[np.dtype]) -> List[GenericColumnType]:
    """Serialize pandas/numpy dtypes to generic, JavaScript-friendly types.

    :param dtypes: dtype objects as found in ``DataFrame.dtypes``; each is
        matched by its string name (e.g. ``"int64"``).
    :returns: one ``GenericColumnType`` per input dtype; unknown dtypes
        fall back to ``GenericColumnType.STRING``.
    """
    mapping = {
        "object": GenericColumnType.STRING,
        "category": GenericColumnType.STRING,
        "datetime64[ns]": GenericColumnType.TEMPORAL,
        "int64": GenericColumnType.NUMERIC,
        # bug fix: was "in32", which made int32 columns fall back to STRING
        "int32": GenericColumnType.NUMERIC,
        "float64": GenericColumnType.NUMERIC,
        "float32": GenericColumnType.NUMERIC,
        "bool": GenericColumnType.BOOLEAN,
    }
    return [mapping.get(str(dtype), GenericColumnType.STRING) for dtype in dtypes]

def indexed(
items: List[Any], key: Union[str, Callable[[Any], Any]]
Expand Down Expand Up @@ -1483,14 +1496,15 @@ class QuerySource(Enum):
SQL_LAB = 2


class GenericColumnType(IntEnum):
    """
    Generic database column type that fits both frontend and backend.

    IntEnum so members compare equal to — and serialize as — plain ints.
    """

    NUMERIC = 0
    STRING = 1
    TEMPORAL = 2
    BOOLEAN = 3


class QueryMode(str, LenientEnum):
Expand Down
42 changes: 21 additions & 21 deletions tests/sqla_models_tests.py
Original file line number Diff line number Diff line change
Expand Up @@ -26,7 +26,7 @@
from superset.db_engine_specs.druid import DruidEngineSpec
from superset.exceptions import QueryObjectValidationError
from superset.models.core import Database
from superset.utils.core import DbColumnType, get_example_database, FilterOperator
from superset.utils.core import GenericColumnType, get_example_database, FilterOperator

from .base_tests import SupersetTestCase

Expand Down Expand Up @@ -74,34 +74,34 @@ def test_temporal_varchar(self):
col.is_dttm = True
assert col.is_temporal is True

def test_generic_column_types(self):
    """Each DB type string should map to exactly one generic column type,
    as reflected by TableColumn.is_string/is_numeric/is_temporal.

    NOTE(review): the rendered diff duplicated the method header and every
    dict entry under both the old ``DbColumnType`` and new
    ``GenericColumnType`` names; only the renamed enum is kept here.
    """
    test_cases: Dict[str, GenericColumnType] = {
        # string
        "CHAR": GenericColumnType.STRING,
        "VARCHAR": GenericColumnType.STRING,
        "NVARCHAR": GenericColumnType.STRING,
        "STRING": GenericColumnType.STRING,
        "TEXT": GenericColumnType.STRING,
        "NTEXT": GenericColumnType.STRING,
        # numeric
        "INT": GenericColumnType.NUMERIC,
        "BIGINT": GenericColumnType.NUMERIC,
        "FLOAT": GenericColumnType.NUMERIC,
        "DECIMAL": GenericColumnType.NUMERIC,
        "MONEY": GenericColumnType.NUMERIC,
        # temporal
        "DATE": GenericColumnType.TEMPORAL,
        "DATETIME": GenericColumnType.TEMPORAL,
        "TIME": GenericColumnType.TEMPORAL,
        "TIMESTAMP": GenericColumnType.TEMPORAL,
    }

    tbl = SqlaTable(table_name="col_type_test_tbl", database=get_example_database())
    for str_type, expected_type in test_cases.items():
        col = TableColumn(column_name="foo", type=str_type, table=tbl)
        self.assertEqual(col.is_temporal, expected_type == GenericColumnType.TEMPORAL)
        self.assertEqual(col.is_numeric, expected_type == GenericColumnType.NUMERIC)
        self.assertEqual(col.is_string, expected_type == GenericColumnType.STRING)

@patch("superset.jinja_context.g")
def test_extra_cache_keys(self, flask_g):
Expand Down

0 comments on commit a1e87a8

Please sign in to comment.