Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

chore: remove druid datasource from the config #19770

Merged
merged 7 commits into from
May 4, 2022
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions UPDATING.md
Original file line number Diff line number Diff line change
Expand Up @@ -30,6 +30,7 @@ assists people when migrating to a new version.

### Breaking Changes

- [19770](https://github.com/apache/superset/pull/19770): As per SIPs 11 and 68, the native NoSQL Druid connector is deprecated and has been removed. Druid is still supported through SQLAlchemy via pydruid. The config keys `DRUID_IS_ACTIVE` and `DRUID_METADATA_LINKS_ENABLED` have also been removed.
eschutho marked this conversation as resolved.
Show resolved Hide resolved
- [19274](https://github.com/apache/superset/pull/19274): The `PUBLIC_ROLE_LIKE_GAMMA` config key has been removed, set `PUBLIC_ROLE_LIKE = "Gamma"` to have the same functionality.
- [19273](https://github.com/apache/superset/pull/19273): The `SUPERSET_CELERY_WORKERS` and `SUPERSET_WORKERS` config keys have been removed. Configure Celery directly using `CELERY_CONFIG` on Superset.
- [19262](https://github.com/apache/superset/pull/19262): Per [SIP-11](https://github.com/apache/superset/issues/6032) and [SIP-68](https://github.com/apache/superset/issues/14909) the native NoSQL Druid connector is deprecated and will no longer be supported. Druid SQL is still [supported](https://superset.apache.org/docs/databases/druid).
Expand Down
59 changes: 0 additions & 59 deletions superset-frontend/src/visualizations/FilterBox/controlPanel.jsx
Original file line number Diff line number Diff line change
Expand Up @@ -20,36 +20,6 @@ import React from 'react';
import { t } from '@superset-ui/core';
import { sections } from '@superset-ui/chart-controls';

// Read the server-provided bootstrap payload embedded in the #app container's
// `data-bootstrap` attribute; it carries the backend config, including the
// DRUID_IS_ACTIVE flag.
const appContainer = document.getElementById('app');
const bootstrapData = JSON.parse(appContainer.getAttribute('data-bootstrap'));
// Coerce to a strict boolean; any missing level in the optional chain yields false.
const druidIsActive = !!bootstrapData?.common?.conf?.DRUID_IS_ACTIVE;
// Extra FilterBox control rows shown only when the legacy native Druid
// connector is enabled; an empty array contributes nothing when spread
// into the control panel sections below.
const druidSection = druidIsActive
  ? [
      [
        {
          name: 'show_druid_time_granularity',
          config: {
            type: 'CheckboxControl',
            label: t('Show Druid granularity dropdown'),
            default: false,
            description: t('Check to include Druid granularity dropdown'),
          },
        },
      ],
      [
        {
          name: 'show_druid_time_origin',
          config: {
            type: 'CheckboxControl',
            label: t('Show Druid time origin'),
            default: false,
            description: t('Check to include time origin dropdown'),
          },
        },
      ],
    ]
  : [];

export default {
controlPanelSections: [
sections.legacyTimeseriesTime,
Expand Down Expand Up @@ -96,35 +66,6 @@ export default {
},
},
],
[
{
name: 'show_sqla_time_granularity',
config: {
type: 'CheckboxControl',
label: druidIsActive
? t('Show SQL time grain dropdown')
: t('Show time grain dropdown'),
default: false,
description: druidIsActive
? t('Check to include SQL time grain dropdown')
: t('Check to include time grain dropdown'),
},
},
],
[
{
name: 'show_sqla_time_column',
config: {
type: 'CheckboxControl',
label: druidIsActive
? t('Show SQL time column')
: t('Show time column'),
default: false,
description: t('Check to include time column dropdown'),
},
},
],
...druidSection,
['adhoc_filters'],
],
},
Expand Down
34 changes: 0 additions & 34 deletions superset/cli/update.py
Original file line number Diff line number Diff line change
Expand Up @@ -18,7 +18,6 @@
import logging
import os
import sys
from datetime import datetime
from typing import Optional

import click
Expand Down Expand Up @@ -53,39 +52,6 @@ def set_database_uri(database_name: str, uri: str, skip_create: bool) -> None:
database_utils.get_or_create_db(database_name, uri, not skip_create)


@click.command()
@with_appcontext
@click.option(
    "--datasource",
    "-d",
    help="Specify which datasource name to load, if "
    "omitted, all datasources will be refreshed",
)
@click.option(
    "--merge",
    "-m",
    is_flag=True,
    default=False,
    help="Specify using 'merge' property during operation. " "Default value is False.",
)
def refresh_druid(datasource: str, merge: bool) -> None:
    """Refresh druid datasources"""
    # Imported lazily so this CLI module can be loaded even when the
    # native Druid connector package is not importable.
    # pylint: disable=import-outside-toplevel
    from superset.connectors.druid.models import DruidCluster

    session = db.session()

    # Ask every registered Druid cluster to re-scan its datasources.
    for cluster in session.query(DruidCluster).all():
        try:
            cluster.refresh_datasources(datasource_name=datasource, merge_flag=merge)
        except Exception as ex:  # pylint: disable=broad-except
            # Best-effort: report the failure and continue with the
            # remaining clusters rather than aborting the whole run.
            print("Error while processing cluster '{}'\n{}".format(cluster, str(ex)))
            logger.exception(ex)
        # NOTE(review): the refresh timestamp is updated even when the refresh
        # above failed — presumably it records the attempt, not success; confirm.
        cluster.metadata_last_refreshed = datetime.now()
        print("Refreshed metadata from cluster " "[" + cluster.cluster_name + "]")
    # Single commit persists all timestamp updates at once.
    session.commit()


@click.command()
@with_appcontext
def update_datasources_cache() -> None:
Expand Down
22 changes: 5 additions & 17 deletions superset/config.py
Original file line number Diff line number Diff line change
Expand Up @@ -258,16 +258,6 @@ def _try_json_readsha(filepath: str, length: int) -> Optional[str]:
DRUID_TZ = tz.tzutc()
DRUID_ANALYSIS_TYPES = ["cardinality"]

# Legacy Druid NoSQL (native) connector
# Druid supports a SQL interface in its newer versions.
# Setting this flag to True enables the deprecated, API-based Druid
# connector. This feature may be removed at a future date.
DRUID_IS_ACTIVE = False

# If Druid is active whether to include the links to scan/refresh Druid datasources.
# This should be disabled if you are trying to wean yourself off of the Druid NoSQL
# connector.
DRUID_METADATA_LINKS_ENABLED = True

# ----------------------------------------------------
# AUTHENTICATION CONFIG
Expand Down Expand Up @@ -646,19 +636,12 @@ def _try_json_readsha(filepath: str, length: int) -> Optional[str]:

VIZ_TYPE_DENYLIST: List[str] = []

# ---------------------------------------------------
# List of data sources not to be refreshed in druid cluster
# ---------------------------------------------------

DRUID_DATA_SOURCE_DENYLIST: List[str] = []

# --------------------------------------------------
# Modules, datasources and middleware to be registered
# --------------------------------------------------
DEFAULT_MODULE_DS_MAP = OrderedDict(
[
("superset.connectors.sqla.models", ["SqlaTable"]),
("superset.connectors.druid.models", ["DruidDatasource"]),
]
)
ADDITIONAL_MODULE_DS_MAP: Dict[str, List[str]] = {}
Expand Down Expand Up @@ -984,8 +967,11 @@ def CSV_TO_HIVE_UPLOAD_DIRECTORY_FUNC( # pylint: disable=invalid-name
# Provide a callable that receives a tracking_url and returns another
# URL. This is used to translate internal Hadoop job tracker URL
# into a proxied one


TRACKING_URL_TRANSFORMER = lambda x: x


# Interval between consecutive polls when using Hive Engine
HIVE_POLL_INTERVAL = int(timedelta(seconds=5).total_seconds())

Expand Down Expand Up @@ -1203,8 +1189,10 @@ def SQL_QUERY_MUTATOR( # pylint: disable=invalid-name,unused-argument
# to allow mutating the object with this callback.
# This can be used to set any properties of the object based on naming
# conventions and such. You can find examples in the tests.

SQLA_TABLE_MUTATOR = lambda table: table


# Global async query config options.
# Requires GLOBAL_ASYNC_QUERIES feature flag to be enabled.
GLOBAL_ASYNC_QUERIES_REDIS_CONFIG = {
Expand Down
16 changes: 0 additions & 16 deletions superset/connectors/druid/__init__.py

This file was deleted.

Loading