[flake8] Resolving C??? errors #3787

Merged: 1 commit, Nov 8, 2017
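This PR sweeps the Superset codebase for missing trailing commas in multi-line calls, tuples, lists, and dicts. The C-prefixed codes referenced in the title appear to come from the flake8-commas plugin (C812 "missing trailing comma" and its siblings); the exact codes are elided as "C???" in the title, so that attribution is an inference rather than something stated in the PR. Every hunk below applies the same one-character fix. A minimal before/after sketch of the pattern, with an illustrative tuple modeled on the export_fields changes below:

# Before: flake8-commas flags the last element of a multi-line literal.
export_fields = (
    'metric_name', 'verbose_name',
    'json', 'description'
)

# After: with a trailing comma in place, appending an element later
# touches only one line, so diffs and reorderings stay minimal.
export_fields = (
    'metric_name', 'verbose_name',
    'json', 'description',
)

To reproduce the check locally, installing the plugin (pip install flake8-commas) makes flake8 report these codes; whether this PR also wires the check into the project's lint configuration is not visible in the hunks shown here.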
superset/connectors/base/models.py (2 changes: 1 addition & 1 deletion)
@@ -219,7 +219,7 @@ def __repr__(self):

     num_types = (
         'DOUBLE', 'FLOAT', 'INT', 'BIGINT',
-        'LONG', 'REAL', 'NUMERIC', 'DECIMAL'
+        'LONG', 'REAL', 'NUMERIC', 'DECIMAL',
     )
     date_types = ('DATE', 'TIME', 'DATETIME')
     str_types = ('VARCHAR', 'STRING', 'CHAR')
superset/connectors/connector_registry.py (2 changes: 1 addition & 1 deletion)
@@ -61,7 +61,7 @@ def get_eager_datasource(cls, session, datasource_type, datasource_id):
             session.query(datasource_class)
             .options(
                 subqueryload(datasource_class.columns),
-                subqueryload(datasource_class.metrics)
+                subqueryload(datasource_class.metrics),
             )
             .filter_by(id=datasource_id)
             .one()
superset/connectors/druid/models.py (40 changes: 20 additions & 20 deletions)
@@ -232,7 +232,7 @@ class DruidColumn(Model, BaseColumn):
     export_fields = (
         'datasource_name', 'column_name', 'is_active', 'type', 'groupby',
         'count_distinct', 'sum', 'avg', 'max', 'min', 'filterable',
-        'description', 'dimension_spec_json'
+        'description', 'dimension_spec_json',
     )

     def __repr__(self):
@@ -253,7 +253,7 @@ def get_metrics(self):
             metric_name='count',
             verbose_name='COUNT(*)',
             metric_type='count',
-            json=json.dumps({'type': 'count', 'name': 'count'})
+            json=json.dumps({'type': 'count', 'name': 'count'}),
         )
         # Somehow we need to reassign this for UDAFs
         if self.type in ('DOUBLE', 'FLOAT'):
@@ -269,7 +269,7 @@ def get_metrics(self):
                 metric_type='sum',
                 verbose_name='SUM({})'.format(self.column_name),
                 json=json.dumps({
-                    'type': mt, 'name': name, 'fieldName': self.column_name})
+                    'type': mt, 'name': name, 'fieldName': self.column_name}),
             )

         if self.avg and self.is_num:
@@ -280,7 +280,7 @@ def get_metrics(self):
                 metric_type='avg',
                 verbose_name='AVG({})'.format(self.column_name),
                 json=json.dumps({
-                    'type': mt, 'name': name, 'fieldName': self.column_name})
+                    'type': mt, 'name': name, 'fieldName': self.column_name}),
             )

         if self.min and self.is_num:
@@ -291,7 +291,7 @@ def get_metrics(self):
                 metric_type='min',
                 verbose_name='MIN({})'.format(self.column_name),
                 json=json.dumps({
-                    'type': mt, 'name': name, 'fieldName': self.column_name})
+                    'type': mt, 'name': name, 'fieldName': self.column_name}),
             )
         if self.max and self.is_num:
             mt = corrected_type.lower() + 'Max'
@@ -301,7 +301,7 @@ def get_metrics(self):
                 metric_type='max',
                 verbose_name='MAX({})'.format(self.column_name),
                 json=json.dumps({
-                    'type': mt, 'name': name, 'fieldName': self.column_name})
+                    'type': mt, 'name': name, 'fieldName': self.column_name}),
             )
         if self.count_distinct:
             name = 'count_distinct__' + self.column_name
@@ -313,8 +313,8 @@ def get_metrics(self):
                     json=json.dumps({
                         'type': self.type,
                         'name': name,
-                        'fieldName': self.column_name
-                    })
+                        'fieldName': self.column_name,
+                    }),
                 )
             else:
                 metrics[name] = DruidMetric(
@@ -324,7 +324,7 @@ def get_metrics(self):
                     json=json.dumps({
                         'type': 'cardinality',
                         'name': name,
-                        'fieldNames': [self.column_name]})
+                        'fieldNames': [self.column_name]}),
                 )
         return metrics

@@ -372,7 +372,7 @@ class DruidMetric(Model, BaseMetric):

     export_fields = (
         'metric_name', 'verbose_name', 'metric_type', 'datasource_name',
-        'json', 'description', 'is_restricted', 'd3format'
+        'json', 'description', 'is_restricted', 'd3format',
     )

     @property
@@ -392,7 +392,7 @@ def perm(self):
         return (
             "{parent_name}.[{obj.metric_name}](id:{obj.id})"
         ).format(obj=self,
-                 parent_name=self.datasource.full_name
+                 parent_name=self.datasource.full_name,
                  ) if self.datasource else None

     @classmethod
@@ -434,7 +434,7 @@ class DruidDatasource(Model, BaseDatasource):

     export_fields = (
         'datasource_name', 'is_hidden', 'description', 'default_endpoint',
-        'cluster_name', 'offset', 'cache_timeout', 'params'
+        'cluster_name', 'offset', 'cache_timeout', 'params',
     )

     @property
@@ -491,7 +491,7 @@ def time_column_grains(self):
                 'week', 'week_starting_sunday', 'week_ending_saturday',
                 'month',
             ],
-            "time_grains": ['now']
+            "time_grains": ['now'],
         }

     def __repr__(self):
@@ -815,11 +815,11 @@ def recursive_get_fields(_conf):
             elif mconf.get('type') == 'constant':
                 post_aggs[metric_name] = Const(
                     mconf.get('value'),
-                    output_name=mconf.get('name', '')
+                    output_name=mconf.get('name', ''),
                 )
             elif mconf.get('type') == 'hyperUniqueCardinality':
                 post_aggs[metric_name] = HyperUniqueCardinality(
-                    mconf.get('name')
+                    mconf.get('name'),
                 )
             elif mconf.get('type') == 'arithmetic':
                 post_aggs[metric_name] = Postaggregator(
@@ -936,7 +936,7 @@ def run_query( # noqa / druid

         if rejected_metrics:
             raise MetricPermException(
-                "Access to the metrics denied: " + ', '.join(rejected_metrics)
+                "Access to the metrics denied: " + ', '.join(rejected_metrics),
             )

         # the dimensions list with dimensionSpecs expanded
@@ -1155,18 +1155,18 @@ def get_filters(raw_filters, num_cols): # noqa
         elif op == '>':
             cond = Bound(
                 col, eq, None,
-                lowerStrict=True, alphaNumeric=is_numeric_col
+                lowerStrict=True, alphaNumeric=is_numeric_col,
             )
         elif op == '<':
             cond = Bound(
                 col, None, eq,
-                upperStrict=True, alphaNumeric=is_numeric_col
+                upperStrict=True, alphaNumeric=is_numeric_col,
             )

         if filters:
             filters = Filter(type="and", fields=[
                 cond,
-                filters
+                filters,
             ])
         else:
             filters = cond
@@ -1192,7 +1192,7 @@ def get_having_filters(self, raw_filters):
         reversed_op_map = {
             '!=': '==',
             '>=': '<',
-            '<=': '>'
+            '<=': '>',
         }

         for flt in raw_filters:
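One subtlety in the single-argument change to HyperUniqueCardinality above: inside a call's argument list a trailing comma never changes behavior, but with bare parentheses it does, because for tuples it is the comma, not the parentheses, that creates the tuple. A quick self-contained illustration (the names are made up):

just_a_str = ('DOUBLE')    # parentheses alone: still a plain str
one_tuple = ('DOUBLE',)    # the comma makes this a 1-tuple
assert isinstance(just_a_str, str) and isinstance(one_tuple, tuple)

# Inside a call, both spellings are equivalent:
assert len('abc') == len('abc',) == 3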
superset/connectors/druid/views.py (4 changes: 2 additions & 2 deletions)
@@ -14,7 +14,7 @@
 from superset.views.base import (
     BaseSupersetView, DatasourceFilter, DeleteMixin,
     get_datasource_exist_error_mgs, ListWidgetWithCheckboxes, SupersetModelView,
-    validate_json
+    validate_json,
 )
 from . import models

@@ -184,7 +184,7 @@ class DruidDatasourceModelView(DatasourceModelView, DeleteMixin): # noqa
         'filter_select_enabled', 'fetch_values_from',
         'default_endpoint', 'offset', 'cache_timeout']
     search_columns = (
-        'datasource_name', 'cluster', 'description', 'owner'
+        'datasource_name', 'cluster', 'description', 'owner',
     )
     add_columns = edit_columns
     show_columns = add_columns + ['perm']
superset/connectors/sqla/models.py (20 changes: 10 additions & 10 deletions)
@@ -44,7 +44,7 @@ class TableColumn(Model, BaseColumn):
         'table_id', 'column_name', 'verbose_name', 'is_dttm', 'is_active',
         'type', 'groupby', 'count_distinct', 'sum', 'avg', 'max', 'min',
         'filterable', 'expression', 'description', 'python_date_format',
-        'database_expression'
+        'database_expression',
     )

     @property
@@ -262,7 +262,7 @@ def sql_url(self):
     def time_column_grains(self):
         return {
             "time_columns": self.dttm_cols,
-            "time_grains": [grain.name for grain in self.database.grains()]
+            "time_grains": [grain.name for grain in self.database.grains()],
         }

     def get_col(self, col_name):
@@ -322,8 +322,8 @@ def get_query_str(self, query_obj):
         sql = str(
             qry.compile(
                 engine,
-                compile_kwargs={"literal_binds": True}
-            )
+                compile_kwargs={"literal_binds": True},
+            ),
         )
         logging.info(sql)
         sql = sqlparse.format(sql, reindent=True)
@@ -622,43 +622,43 @@ def fetch_metadata(self):
                     metric_name='sum__' + dbcol.column_name,
                     verbose_name='sum__' + dbcol.column_name,
                     metric_type='sum',
-                    expression="SUM({})".format(quoted)
+                    expression="SUM({})".format(quoted),
                 ))
             if dbcol.avg:
                 metrics.append(M(
                     metric_name='avg__' + dbcol.column_name,
                     verbose_name='avg__' + dbcol.column_name,
                     metric_type='avg',
-                    expression="AVG({})".format(quoted)
+                    expression="AVG({})".format(quoted),
                 ))
             if dbcol.max:
                 metrics.append(M(
                     metric_name='max__' + dbcol.column_name,
                     verbose_name='max__' + dbcol.column_name,
                     metric_type='max',
-                    expression="MAX({})".format(quoted)
+                    expression="MAX({})".format(quoted),
                 ))
             if dbcol.min:
                 metrics.append(M(
                     metric_name='min__' + dbcol.column_name,
                     verbose_name='min__' + dbcol.column_name,
                     metric_type='min',
-                    expression="MIN({})".format(quoted)
+                    expression="MIN({})".format(quoted),
                 ))
             if dbcol.count_distinct:
                 metrics.append(M(
                     metric_name='count_distinct__' + dbcol.column_name,
                     verbose_name='count_distinct__' + dbcol.column_name,
                     metric_type='count_distinct',
-                    expression="COUNT(DISTINCT {})".format(quoted)
+                    expression="COUNT(DISTINCT {})".format(quoted),
                 ))
             dbcol.type = datatype

         metrics.append(M(
             metric_name='count',
             verbose_name='COUNT(*)',
             metric_type='count',
-            expression="COUNT(*)"
+            expression="COUNT(*)",
         ))

         dbmetrics = db.session.query(M).filter(M.table_id == self.id).filter(
superset/connectors/sqla/views.py (6 changes: 3 additions & 3 deletions)
@@ -117,7 +117,7 @@ class SqlMetricInlineView(CompactCRUDMixin, SupersetModelView): # noqa
"(https://github.com/d3/d3-format/blob/master/README.md#format). "
"For instance, this default formatting applies in the Table "
"visualization and allow for different metric to use different "
"formats", True
"formats", True,
),
}
add_columns = edit_columns
@@ -189,13 +189,13 @@ class TableModelView(DatasourceModelView, DeleteMixin): # noqa
"markdown</a>"),
'sql': _(
"This fields acts a Superset view, meaning that Superset will "
"run a query against this string as a subquery."
"run a query against this string as a subquery.",
),
'fetch_values_predicate': _(
"Predicate applied when fetching distinct value to "
"populate the filter control component. Supports "
"jinja template syntax. Applies only when "
"`Enable Filter Select` is on."
"`Enable Filter Select` is on.",
),
'default_endpoint': _(
"Redirects to this endpoint when clicking on the table "
superset/data/__init__.py (8 changes: 4 additions & 4 deletions)
@@ -93,7 +93,7 @@ def load_energy():
"viz_type": "sankey",
"where": ""
}
""")
"""),
)
misc_dash_slices.append(slc.slice_name)
merge_slice(slc)
@@ -119,7 +119,7 @@ def load_energy():
"viz_type": "directed_force",
"where": ""
}
""")
"""),
)
misc_dash_slices.append(slc.slice_name)
merge_slice(slc)
@@ -145,7 +145,7 @@ def load_energy():
"xscale_interval": "1",
"yscale_interval": "1"
}
""")
"""),
)
misc_dash_slices.append(slc.slice_name)
merge_slice(slc)
@@ -971,7 +971,7 @@ def load_country_map_data():
             '2012': BigInteger,
             '2013': BigInteger,
             '2014': BigInteger,
-            'date': Date()
+            'date': Date(),
         },
         index=False)
     print("Done loading table!")
superset/dataframe.py (2 changes: 1 addition & 1 deletion)
@@ -139,7 +139,7 @@ def columns(self):
                 column.update({
                     'is_date': True,
                     'is_dim': False,
-                    'agg': None
+                    'agg': None,
                 })
             # 'agg' is optional attribute
             if not column['agg']:
superset/db_engine_specs.py (2 changes: 1 addition & 1 deletion)
@@ -486,7 +486,7 @@ def extra_table_metadata(cls, database, table_name, schema_name):
                 'cols': cols,
                 'latest': {col_name: latest_part},
                 'partitionQuery': pql,
-            }
+            },
         }

     @classmethod
superset/db_engines/hive.py (2 changes: 1 addition & 1 deletion)
@@ -30,7 +30,7 @@ def fetch_logs(self, max_rows=1024,
         operationHandle=self._operationHandle,
         orientation=ttypes.TFetchOrientation.FETCH_NEXT,
         maxRows=self.arraysize,
-        fetchType=1 # 0: results, 1: logs
+        fetchType=1, # 0: results, 1: logs
     )
     response = self._connection.client.FetchResults(req)
     hive._check_status(response)
superset/models/core.py (6 changes: 3 additions & 3 deletions)
@@ -199,7 +199,7 @@ def form_data(self):
         form_data.update({
             'slice_id': self.id,
             'viz_type': self.viz_type,
-            'datasource': str(self.datasource_id) + '__' + self.datasource_type
+            'datasource': str(self.datasource_id) + '__' + self.datasource_type,
         })
         if self.cache_timeout:
             form_data['cache_timeout'] = self.cache_timeout
@@ -301,7 +301,7 @@ def import_obj(cls, slc_to_import, import_time=None):
     'dashboard_user', metadata,
     Column('id', Integer, primary_key=True),
     Column('user_id', Integer, ForeignKey('ab_user.id')),
-    Column('dashboard_id', Integer, ForeignKey('dashboards.id'))
+    Column('dashboard_id', Integer, ForeignKey('dashboards.id')),
 )


@@ -687,7 +687,7 @@ def wrap_sql_limit(self, sql, limit=1000):
             select('*')
             .select_from(
                 TextAsFrom(text(sql), ['*'])
-                .alias('inner_qry')
+                .alias('inner_qry'),
             ).limit(limit)
         )
         return self.compile_sqla_query(qry)