pylint: accept specific 2 character names by default #9460

Merged · 3 commits · Apr 8, 2020
4 changes: 2 additions & 2 deletions .pylintrc
@@ -115,10 +115,10 @@ evaluation=10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10)
[BASIC]

# Good variable names which should always be accepted, separated by a comma
-good-names=i,j,k,ex,Run,_,d,e,v,o,l,x,ts,f
+good-names=_,df,ex,f,i,id,j,k,l,o,pk,Run,ts,v,x

# Bad variable names which should always be refused, separated by a comma
-bad-names=foo,bar,baz,toto,tutu,tata,d,fd
+bad-names=fd,foo,bar,baz,toto,tutu,tata

# Colon-delimited sets of names that determine each other's naming style when
# the name regexes allow several styles.
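What the new lists mean in practice, shown in a short hypothetical snippet (not part of this PR): well-understood two-character names such as df, id, and pk now pass pylint's invalid-name check without a per-line disable, while e and d are dropped from good-names, which is why every `except ... as e` handler in this diff becomes `ex`. (Note that d previously appeared on both the good and bad lists; the new config drops it from both.)

# Hypothetical illustration: lint this file against the updated .pylintrc.
# Names on the new good-names list raise no invalid-name warning; "fd"
# remains on bad-names and would still be flagged.

def first_row(df):  # "df" is now whitelisted for dataframe-like arguments
    """Return the first row of an iterable, or None if it is empty."""
    for row in df:
        return row
    return None

def to_int(value, pk=0):  # "pk" (primary key) is newly accepted
    """Parse an int, falling back to a default primary key."""
    try:
        return int(value)
    except ValueError as ex:  # "ex" stays accepted; bare "e" no longer is
        print(f"not an integer: {ex}")
        return pk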
30 changes: 15 additions & 15 deletions superset/charts/api.py
@@ -170,11 +170,11 @@ def post(self) -> Response:
try:
new_model = CreateChartCommand(g.user, item.data).run()
return self.response(201, id=new_model.id, result=item.data)
-except ChartInvalidError as e:
-return self.response_422(message=e.normalized_messages())
-except ChartCreateFailedError as e:
-logger.error(f"Error creating model {self.__class__.__name__}: {e}")
-return self.response_422(message=str(e))
+except ChartInvalidError as ex:
+return self.response_422(message=ex.normalized_messages())
+except ChartCreateFailedError as ex:
+logger.error(f"Error creating model {self.__class__.__name__}: {ex}")
+return self.response_422(message=str(ex))

@expose("/<pk>", methods=["PUT"])
@protect()
@@ -237,11 +237,11 @@ def put( # pylint: disable=too-many-return-statements, arguments-differ
return self.response_404()
except ChartForbiddenError:
return self.response_403()
-except ChartInvalidError as e:
-return self.response_422(message=e.normalized_messages())
-except ChartUpdateFailedError as e:
-logger.error(f"Error updating model {self.__class__.__name__}: {e}")
-return self.response_422(message=str(e))
+except ChartInvalidError as ex:
+return self.response_422(message=ex.normalized_messages())
+except ChartUpdateFailedError as ex:
+logger.error(f"Error updating model {self.__class__.__name__}: {ex}")
+return self.response_422(message=str(ex))

@expose("/<pk>", methods=["DELETE"])
@protect()
@@ -285,9 +285,9 @@ def delete(self, pk: int) -> Response: # pylint: disable=arguments-differ
return self.response_404()
except ChartForbiddenError:
return self.response_403()
-except ChartDeleteFailedError as e:
-logger.error(f"Error deleting model {self.__class__.__name__}: {e}")
-return self.response_422(message=str(e))
+except ChartDeleteFailedError as ex:
+logger.error(f"Error deleting model {self.__class__.__name__}: {ex}")
+return self.response_422(message=str(ex))

@expose("/", methods=["DELETE"])
@protect()
@@ -346,5 +346,5 @@ def bulk_delete(
return self.response_404()
except ChartForbiddenError:
return self.response_403()
-except ChartBulkDeleteFailedError as e:
-return self.response_422(message=str(e))
+except ChartBulkDeleteFailedError as ex:
+return self.response_422(message=str(ex))
4 changes: 2 additions & 2 deletions superset/charts/commands/bulk_delete.py
@@ -44,8 +44,8 @@ def run(self) -> None:
self.validate()
try:
ChartDAO.bulk_delete(self._models)
-except DeleteFailedError as e:
-logger.exception(e.exception)
+except DeleteFailedError as ex:
+logger.exception(ex.exception)
raise ChartBulkDeleteFailedError()

def validate(self) -> None:
12 changes: 6 additions & 6 deletions superset/charts/commands/create.py
@@ -44,8 +44,8 @@ def run(self) -> Model:
self.validate()
try:
chart = ChartDAO.create(self._properties)
-except DAOCreateFailedError as e:
-logger.exception(e.exception)
+except DAOCreateFailedError as ex:
+logger.exception(ex.exception)
raise ChartCreateFailedError()
return chart

@@ -60,8 +60,8 @@ def validate(self) -> None:
try:
datasource = get_datasource_by_id(datasource_id, datasource_type)
self._properties["datasource_name"] = datasource.name
-except ValidationError as e:
-exceptions.append(e)
+except ValidationError as ex:
+exceptions.append(ex)

# Validate/Populate dashboards
dashboards = DashboardDAO.find_by_ids(dashboard_ids)
@@ -72,8 +72,8 @@ def validate(self) -> None:
try:
owners = populate_owners(self._actor, owner_ids)
self._properties["owners"] = owners
-except ValidationError as e:
-exceptions.append(e)
+except ValidationError as ex:
+exceptions.append(ex)
if exceptions:
exception = ChartInvalidError()
exception.add_list(exceptions)
4 changes: 2 additions & 2 deletions superset/charts/commands/delete.py
@@ -45,8 +45,8 @@ def run(self) -> Model:
self.validate()
try:
chart = ChartDAO.delete(self._model)
-except DAODeleteFailedError as e:
-logger.exception(e.exception)
+except DAODeleteFailedError as ex:
+logger.exception(ex.exception)
raise ChartDeleteFailedError()
return chart

12 changes: 6 additions & 6 deletions superset/charts/commands/update.py
@@ -52,8 +52,8 @@ def run(self) -> Model:
self.validate()
try:
chart = ChartDAO.update(self._model, self._properties)
-except DAOUpdateFailedError as e:
-logger.exception(e.exception)
+except DAOUpdateFailedError as ex:
+logger.exception(ex.exception)
raise ChartUpdateFailedError()
return chart

@@ -84,8 +84,8 @@ def validate(self) -> None:
try:
datasource = get_datasource_by_id(datasource_id, datasource_type)
self._properties["datasource_name"] = datasource.name
-except ValidationError as e:
-exceptions.append(e)
+except ValidationError as ex:
+exceptions.append(ex)

# Validate/Populate dashboards
dashboards = DashboardDAO.find_by_ids(dashboard_ids)
@@ -97,8 +97,8 @@ def validate(self) -> None:
try:
owners = populate_owners(self._actor, owner_ids)
self._properties["owners"] = owners
-except ValidationError as e:
-exceptions.append(e)
+except ValidationError as ex:
+exceptions.append(ex)
if exceptions:
exception = ChartInvalidError()
exception.add_list(exceptions)
4 changes: 2 additions & 2 deletions superset/charts/dao.py
@@ -47,7 +47,7 @@ def bulk_delete(models: Optional[List[Slice]], commit: bool = True) -> None:
)
if commit:
db.session.commit()
-except SQLAlchemyError as e:
+except SQLAlchemyError as ex:
if commit:
db.session.rollback()
-raise e
+raise ex
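For context, ChartDAO.bulk_delete follows a commit-or-rollback shape around the session. A minimal, self-contained sketch of that pattern (stand-in session type and names, not Superset's actual API), using the renamed exception variable:

# Minimal sketch of the bulk-delete pattern above; the stub session lets
# it run without SQLAlchemy or a database.
from typing import Any, List

class StubSession:
    """Mimics the commit/rollback surface of db.session."""
    def commit(self) -> None:
        print("committed")
    def rollback(self) -> None:
        print("rolled back")

def bulk_delete(models: List[Any], session: StubSession, commit: bool = True) -> None:
    try:
        for model in models:
            print(f"deleting {model!r}")  # the real DAO deletes via the session
        if commit:
            session.commit()
    except Exception as ex:  # the original catches SQLAlchemyError as ex
        if commit:
            session.rollback()
        raise ex  # re-raise after rolling back, preserving the failure

bulk_delete(["slice-1", "slice-2"], StubSession())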
18 changes: 9 additions & 9 deletions superset/cli.py
@@ -197,9 +197,9 @@ def refresh_druid(datasource, merge):
for cluster in session.query(DruidCluster).all():
try:
cluster.refresh_datasources(datasource_name=datasource, merge_flag=merge)
-except Exception as e: # pylint: disable=broad-except
-print("Error while processing cluster '{}'\n{}".format(cluster, str(e)))
-logger.exception(e)
+except Exception as ex: # pylint: disable=broad-except
+print("Error while processing cluster '{}'\n{}".format(cluster, str(ex)))
+logger.exception(ex)
cluster.metadata_last_refreshed = datetime.now()
print("Refreshed metadata from cluster " "[" + cluster.cluster_name + "]")
session.commit()
@@ -245,9 +245,9 @@ def import_dashboards(path, recursive, username):
try:
with file_.open() as data_stream:
dashboard_import_export.import_dashboards(db.session, data_stream)
-except Exception as e: # pylint: disable=broad-except
+except Exception as ex: # pylint: disable=broad-except
logger.error("Error when importing dashboard from file %s", file_)
-logger.error(e)
+logger.error(ex)


@superset.command()
@@ -317,9 +317,9 @@ def import_datasources(path, sync, recursive):
dict_import_export.import_from_dict(
db.session, yaml.safe_load(data_stream), sync=sync_array
)
-except Exception as e: # pylint: disable=broad-except
+except Exception as ex: # pylint: disable=broad-except
logger.error("Error when importing datasources from file %s", file_)
-logger.error(e)
+logger.error(ex)


@superset.command()
@@ -397,8 +397,8 @@ def update_datasources_cache():
database.get_all_view_names_in_database(
force=True, cache=True, cache_timeout=24 * 60 * 60
)
-except Exception as e: # pylint: disable=broad-except
-print("{}".format(str(e)))
+except Exception as ex: # pylint: disable=broad-except
+print("{}".format(str(ex)))


@superset.command()
22 changes: 10 additions & 12 deletions superset/common/query_context.py
@@ -113,7 +113,7 @@ def get_query_result(self, query_object: QueryObject) -> Dict[str, Any]:
}

@staticmethod
-def df_metrics_to_num( # pylint: disable=invalid-name,no-self-use
+def df_metrics_to_num( # pylint: disable=no-self-use
df: pd.DataFrame, query_object: QueryObject
) -> None:
"""Converting metrics to numeric when pandas.read_sql cannot"""
@@ -122,9 +122,7 @@ def df_metrics_to_num( # pylint: disable=invalid-name,no-self-use
df[col] = pd.to_numeric(df[col], errors="coerce")

@staticmethod
-def get_data( # pylint: disable=invalid-name,no-self-use
-df: pd.DataFrame,
-) -> List[Dict]:
+def get_data(df: pd.DataFrame,) -> List[Dict]: # pylint: disable=no-self-use
return df.to_dict(orient="records")

def get_single_payload(self, query_obj: QueryObject) -> Dict[str, Any]:
@@ -197,10 +195,10 @@ def get_df_payload( # pylint: disable=too-many-locals,too-many-statements
status = utils.QueryStatus.SUCCESS
is_loaded = True
stats_logger.incr("loaded_from_cache")
-except Exception as e: # pylint: disable=broad-except
-logger.exception(e)
+except Exception as ex: # pylint: disable=broad-except
+logger.exception(ex)
logger.error(
"Error reading cache: %s", utils.error_msg_from_exception(e)
"Error reading cache: %s", utils.error_msg_from_exception(ex)
)
logger.info("Serving from cache")

@@ -216,10 +214,10 @@
if not self.force:
stats_logger.incr("loaded_from_source_without_force")
is_loaded = True
-except Exception as e: # pylint: disable=broad-except
-logger.exception(e)
+except Exception as ex: # pylint: disable=broad-except
+logger.exception(ex)
if not error_message:
error_message = "{}".format(e)
error_message = "{}".format(ex)
status = utils.QueryStatus.FAILED
stacktrace = utils.get_stacktrace()

@@ -234,11 +232,11 @@

stats_logger.incr("set_cache_key")
cache.set(cache_key, cache_binary, timeout=self.cache_timeout)
-except Exception as e: # pylint: disable=broad-except
+except Exception as ex: # pylint: disable=broad-except
# cache.set call can fail if the backend is down or if
# the key is too large or whatever other reasons
logger.warning("Could not cache key %s", cache_key)
-logger.exception(e)
+logger.exception(ex)
cache.delete(cache_key)
return {
"cache_key": cache_key,
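The get_df_payload hunks share one caching discipline: a failed cache read is logged and treated as a miss, and a failed cache write is logged and the key deleted, so a broken cache never fails the request. A condensed, hypothetical sketch (dict-backed cache and invented helper names, not the real QueryContext API):

# Condensed, hypothetical sketch of the cache-tolerant flow in
# QueryContext.get_df_payload.
import logging

logger = logging.getLogger(__name__)

def get_payload(cache: dict, cache_key: str, run_query):
    df = None
    try:
        df = cache.get(cache_key)  # cache.get(...) in the original
    except Exception as ex:  # pylint: disable=broad-except
        logger.exception(ex)  # a bad cache read is only a cache miss
    if df is None:
        df = run_query()  # fall back to querying the source
        try:
            cache[cache_key] = df  # cache.set(...) in the original
        except Exception as ex:  # pylint: disable=broad-except
            logger.warning("Could not cache key %s", cache_key)
            logger.exception(ex)
            cache.pop(cache_key, None)  # cache.delete(...) in the original
    return df

print(get_payload({}, "q1", lambda: [{"metric": 1}]))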
6 changes: 3 additions & 3 deletions superset/connectors/base/models.py
@@ -76,7 +76,7 @@ class BaseDatasource(
# ---------------------------------------------------------------

# Columns
-id = Column(Integer, primary_key=True) # pylint: disable=invalid-name
+id = Column(Integer, primary_key=True)
description = Column(Text)
default_endpoint = Column(Text)
is_featured = Column(Boolean, default=False) # TODO deprecating
@@ -453,7 +453,7 @@ class BaseColumn(AuditMixinNullable, ImportMixin):

__tablename__: Optional[str] = None # {connector_name}_column

-id = Column(Integer, primary_key=True) # pylint: disable=invalid-name
+id = Column(Integer, primary_key=True)
column_name = Column(String(255), nullable=False)
verbose_name = Column(String(1024))
is_active = Column(Boolean, default=True)
@@ -526,7 +526,7 @@ class BaseMetric(AuditMixinNullable, ImportMixin):

__tablename__: Optional[str] = None # {connector_name}_metric

-id = Column(Integer, primary_key=True) # pylint: disable=invalid-name
+id = Column(Integer, primary_key=True)
metric_name = Column(String(255), nullable=False)
verbose_name = Column(String(1024))
metric_type = Column(String(32))
8 changes: 4 additions & 4 deletions superset/connectors/druid/models.py
@@ -657,9 +657,9 @@ def latest_metadata(self):
merge=self.merge_flag,
analysisTypes=[],
)
-except Exception as e:
+except Exception as ex:
logger.warning("Failed first attempt to get latest segment")
-logger.exception(e)
+logger.exception(ex)
if not segment_metadata:
# if no segments in the past 7 days, look at all segments
lbound = datetime(1901, 1, 1).isoformat()[:10]
@@ -674,9 +674,9 @@
merge=self.merge_flag,
analysisTypes=[],
)
-except Exception as e:
+except Exception as ex:
logger.warning("Failed 2nd attempt to get latest segment")
-logger.exception(e)
+logger.exception(ex)
if segment_metadata:
return segment_metadata[-1]["columns"]

10 changes: 5 additions & 5 deletions superset/connectors/druid/views.py
@@ -112,8 +112,8 @@ def pre_update(self, col):
if col.dimension_spec_json:
try:
dimension_spec = json.loads(col.dimension_spec_json)
-except ValueError as e:
-raise ValueError("Invalid Dimension Spec JSON: " + str(e))
+except ValueError as ex:
+raise ValueError("Invalid Dimension Spec JSON: " + str(ex))
if not isinstance(dimension_spec, dict):
raise ValueError("Dimension Spec must be a JSON object")
if "outputName" not in dimension_spec:
@@ -374,15 +374,15 @@ def refresh_datasources(self, refresh_all=True):
valid_cluster = True
try:
cluster.refresh_datasources(refresh_all=refresh_all)
-except Exception as e:
+except Exception as ex:
valid_cluster = False
flash(
"Error while processing cluster '{}'\n{}".format(
-cluster_name, utils.error_msg_from_exception(e)
+cluster_name, utils.error_msg_from_exception(ex)
),
"danger",
)
-logger.exception(e)
+logger.exception(ex)
pass
if valid_cluster:
cluster.metadata_last_refreshed = datetime.now()