style: upgrade black to latest version, use black default line length (#521)

* style: upgrade black to latest version, use black default line length

* Upgrade to the latest version of black.

* Stop passing the "--line-length" argument to black, so that black's default
line length is used, like other Google API libraries (a hedged sketch of such
a change appears below, before the file diffs).

* Add some errors for flake8 to ignore, consistent with other Google API
libraries.
Chris Rossi authored Aug 27, 2020
1 parent fc0366a commit 448054f
Showing 43 changed files with 458 additions and 1,182 deletions.
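The noxfile.py change implied by the commit message is not among the hunks shown below, but a minimal sketch of dropping the flag might look like the following; the session name, Python version, and target directories are assumptions, not taken from this repository:

import nox

@nox.session(python="3.8")
def blacken(session):
    """Format the source tree with black (illustrative sketch only)."""
    session.install("black")
    # Before: session.run("black", "--line-length=79", "google", "tests", "docs")
    # After: with no "--line-length" flag, black falls back to its default of 88 characters.
    session.run("black", "google", "tests", "docs")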
18 changes: 18 additions & 0 deletions .flake8
@@ -0,0 +1,18 @@
# -*- coding: utf-8 -*-
#
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

[flake8]
ignore = E203, E266, E501, W503
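For context: E203 (whitespace before ':'), E501 (line too long), and W503 (line break before a binary operator) are the pycodestyle checks that commonly conflict with black's output, and E266 flags block comments with more than one leading '#'. Ignoring E501 leaves line length entirely to black, matching the commit's goal of using black's default.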
4 changes: 1 addition & 3 deletions docs/conf.py
@@ -158,9 +158,7 @@
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(master_doc, "ndb.tex", "ndb Documentation", "Google LLC", "manual")
]
latex_documents = [(master_doc, "ndb.tex", "ndb Documentation", "Google LLC", "manual")]


# -- Options for manual page output ------------------------------------------
6 changes: 1 addition & 5 deletions google/cloud/ndb/_batch.py
@@ -43,11 +43,7 @@ def get_batch(batch_cls, options=None):
if options is not None:
options_key = tuple(
sorted(
(
(key, value)
for key, value in options.items()
if value is not None
)
((key, value) for key, value in options.items() if value is not None)
)
)
else:
7 changes: 3 additions & 4 deletions google/cloud/ndb/_cache.py
@@ -33,8 +33,8 @@ class ContextCache(dict):

def get_and_validate(self, key):
"""Verify that the entity's key has not changed since it was added
to the cache. If it has changed, consider this a cache miss.
See issue 13. http://goo.gl/jxjOP"""
to the cache. If it has changed, consider this a cache miss.
See issue 13. http://goo.gl/jxjOP"""
entity = self[key] # May be None, meaning "doesn't exist".
if entity is None or entity._key == key:
return entity
@@ -58,8 +58,7 @@ def _future_result(result):


class _GlobalCacheBatch(object):
"""Abstract base for classes used to batch operations for the global cache.
"""
"""Abstract base for classes used to batch operations for the global cache."""

def full(self):
"""Indicates whether more work can be added to this batch.
28 changes: 7 additions & 21 deletions google/cloud/ndb/_datastore_api.py
@@ -132,9 +132,7 @@ def lookup(key, options):
use_global_cache = context._use_global_cache(key, options)

if not (use_global_cache or use_datastore):
raise TypeError(
"use_global_cache and use_datastore can't both be False"
)
raise TypeError("use_global_cache and use_datastore can't both be False")

entity_pb = _NOT_FOUND
key_locked = False
@@ -160,9 +158,7 @@ def lookup(key, options):
if use_global_cache and not key_locked and entity_pb is not _NOT_FOUND:
expires = context._global_cache_timeout(key, options)
serialized = entity_pb.SerializeToString()
yield _cache.global_compare_and_swap(
cache_key, serialized, expires=expires
)
yield _cache.global_compare_and_swap(cache_key, serialized, expires=expires)

raise tasklets.Return(entity_pb)

@@ -257,9 +253,7 @@ def lookup_callback(self, rpc):
next_batch = _batch.get_batch(type(self), self.options)
for key in results.deferred:
todo_key = key.SerializeToString()
next_batch.todo.setdefault(todo_key, []).extend(
self.todo[todo_key]
)
next_batch.todo.setdefault(todo_key, []).extend(self.todo[todo_key])

# For all missing keys, set result to _NOT_FOUND and let callers decide
# how to handle
@@ -331,9 +325,7 @@ def get_read_options(options, default_read_consistency=None):
read_consistency = default_read_consistency

elif read_consistency is EVENTUAL:
raise ValueError(
"read_consistency must not be EVENTUAL when in transaction"
)
raise ValueError("read_consistency must not be EVENTUAL when in transaction")

return datastore_pb2.ReadOptions(
read_consistency=read_consistency, transaction=transaction
@@ -380,9 +372,7 @@ def put(entity, options):
use_global_cache = context._use_global_cache(entity.key, options)
use_datastore = context._use_datastore(entity.key, options)
if not (use_global_cache or use_datastore):
raise TypeError(
"use_global_cache and use_datastore can't both be False"
)
raise TypeError("use_global_cache and use_datastore can't both be False")

if not use_datastore and entity.key.is_partial:
raise TypeError("Can't store partial keys when use_datastore is False")
@@ -990,9 +980,7 @@ def _datastore_allocate_ids(keys, retries=None, timeout=None):
:class:`google.cloud.datastore_v1.datastore_pb2.AllocateIdsResponse`
"""
client = context_module.get_context().client
request = datastore_pb2.AllocateIdsRequest(
project_id=client.project, keys=keys
)
request = datastore_pb2.AllocateIdsRequest(project_id=client.project, keys=keys)

return make_call("AllocateIds", request, retries=retries, timeout=timeout)

@@ -1050,9 +1038,7 @@ def _datastore_begin_transaction(read_only, retries=None, timeout=None):
project_id=client.project, transaction_options=options
)

return make_call(
"BeginTransaction", request, retries=retries, timeout=timeout
)
return make_call("BeginTransaction", request, retries=retries, timeout=timeout)


@tasklets.tasklet
24 changes: 6 additions & 18 deletions google/cloud/ndb/_datastore_query.py
@@ -311,9 +311,7 @@ def _next_batch(self):
batch.more_results == MORE_RESULTS_TYPE_NOT_FINISHED
)

self._more_results_after_limit = (
batch.more_results == MORE_RESULTS_AFTER_LIMIT
)
self._more_results_after_limit = batch.more_results == MORE_RESULTS_AFTER_LIMIT

if more_results:
# Fix up query for next batch
@@ -538,9 +536,7 @@ def __init__(self, query, raw=False):
self._extra_projections = extra_projections

queries = [
query.copy(
filters=node, projection=projection, offset=None, limit=None
)
query.copy(filters=node, projection=projection, offset=None, limit=None)
for node in query.filters._nodes
]
self._result_sets = [iterate(_query, raw=True) for _query in queries]
@@ -625,10 +621,7 @@ def has_next_async(self):
def probably_has_next(self):
"""Implements :meth:`QueryIterator.probably_has_next`."""
return bool(self._next_result) or any(
[
result_set.probably_has_next()
for result_set in self._result_sets
]
[result_set.probably_has_next() for result_set in self._result_sets]
)

def next(self):
@@ -774,9 +767,7 @@ def entity(self):
key = key_module.Key._from_ds_key(ds_key)
return key

raise NotImplementedError(
"Got unexpected entity result type for query."
)
raise NotImplementedError("Got unexpected entity result type for query.")


def _query_to_protobuf(query):
@@ -794,16 +785,13 @@ def _query_to_protobuf(query):

if query.projection:
query_args["projection"] = [
query_pb2.Projection(
property=query_pb2.PropertyReference(name=name)
)
query_pb2.Projection(property=query_pb2.PropertyReference(name=name))
for name in query.projection
]

if query.distinct_on:
query_args["distinct_on"] = [
query_pb2.PropertyReference(name=name)
for name in query.distinct_on
query_pb2.PropertyReference(name=name) for name in query.distinct_on
]

if query.order_by:
3 changes: 1 addition & 2 deletions google/cloud/ndb/_datastore_types.py
@@ -55,8 +55,7 @@ def __init__(self, blob_key):
if isinstance(blob_key, bytes):
if len(blob_key) > _MAX_STRING_LENGTH:
raise exceptions.BadValueError(
"blob key must be under {:d} "
"bytes.".format(_MAX_STRING_LENGTH)
"blob key must be under {:d} " "bytes.".format(_MAX_STRING_LENGTH)
)
elif blob_key is not None:
raise exceptions.BadValueError(
8 changes: 2 additions & 6 deletions google/cloud/ndb/_eventloop.py
@@ -31,9 +31,7 @@

log = logging.getLogger(__name__)

_Event = collections.namedtuple(
"_Event", ("when", "callback", "args", "kwargs")
)
_Event = collections.namedtuple("_Event", ("when", "callback", "args", "kwargs"))


class EventLoop(object):
@@ -314,9 +312,7 @@ def run0(self):
start_time = time.time()
rpc_id, rpc = self.rpc_results.get()
elapsed = time.time() - start_time
utils.logging_debug(
log, "Blocked for {}s awaiting RPC results.", elapsed
)
utils.logging_debug(log, "Blocked for {}s awaiting RPC results.", elapsed)
context.wait_time += elapsed

callback = self.rpcs.pop(rpc_id)
59 changes: 15 additions & 44 deletions google/cloud/ndb/_gql.py
@@ -93,9 +93,7 @@ class GQL(object):
_limit = -1
_hint = ""

def __init__(
self, query_string, _app=None, _auth_domain=None, namespace=None
):
def __init__(self, query_string, _app=None, _auth_domain=None, namespace=None):
"""Parses the input query into the class as a pre-compiled query.
Args:
@@ -191,9 +189,7 @@ def _entity(self):
_quoted_identifier_regex = re.compile(r'((?:"[^"\s]+")+)$')
_conditions_regex = re.compile(r"(<=|>=|!=|=|<|>|is|in)$", re.IGNORECASE)
_number_regex = re.compile(r"(\d+)$")
_cast_regex = re.compile(
r"(geopt|user|key|date|time|datetime)$", re.IGNORECASE
)
_cast_regex = re.compile(r"(geopt|user|key|date|time|datetime)$", re.IGNORECASE)

def _Error(self, error_message):
"""Generic query error.
@@ -216,8 +212,7 @@ def _Error(self, error_message):
)

def _Accept(self, symbol_string):
"""Advance the symbol and return true if the next symbol matches input.
"""
"""Advance the symbol and return true if the next symbol matches input."""
if self._next_symbol < len(self._symbols):
if self._symbols[self._next_symbol].upper() == symbol_string:
self._next_symbol += 1
@@ -335,9 +330,7 @@ def _FilterList(self):

if not self._AddSimpleFilter(identifier, condition, self._Reference()):

if not self._AddSimpleFilter(
identifier, condition, self._Literal()
):
if not self._AddSimpleFilter(identifier, condition, self._Literal()):

type_cast = self._TypeCast()
if not type_cast or not self._AddProcessedParameterFilter(
@@ -389,13 +382,9 @@ def _CheckFilterSyntax(self, identifier, condition):
else:
self._Error('"IS" expected to follow "ANCESTOR"')
elif condition.lower() == "is":
self._Error(
'"IS" can only be used when comparing against "ANCESTOR"'
)
self._Error('"IS" can only be used when comparing against "ANCESTOR"')

def _AddProcessedParameterFilter(
self, identifier, condition, operator, parameters
):
def _AddProcessedParameterFilter(self, identifier, condition, operator, parameters):
"""Add a filter with post-processing required.
Args:
@@ -424,9 +413,7 @@ def _AddProcessedParameterFilter(
if operator == "list" and condition.lower() != "in":
self._Error("Only IN can process a list of values")

self._filters.setdefault(filter_rule, []).append(
(operator, parameters)
)
self._filters.setdefault(filter_rule, []).append((operator, parameters))
return True

def _AddSimpleFilter(self, identifier, condition, parameter):
@@ -776,9 +763,7 @@ def __repr__(self):

def _raise_not_implemented(func):
def raise_inner(value):
raise NotImplementedError(
"GQL function {} is not implemented".format(func)
)
raise NotImplementedError("GQL function {} is not implemented".format(func))

return raise_inner

@@ -795,9 +780,7 @@ def _time_function(values):
time_tuple = time.strptime(value, "%H:%M:%S")
except ValueError as error:
_raise_cast_error(
"Error during time conversion, {}, {}".format(
error, values
)
"Error during time conversion, {}, {}".format(error, values)
)
time_tuple = time_tuple[3:]
time_tuple = time_tuple[0:3]
@@ -812,9 +795,7 @@ def _time_function(values):
try:
return datetime.time(*time_tuple)
except ValueError as error:
_raise_cast_error(
"Error during time conversion, {}, {}".format(error, values)
)
_raise_cast_error("Error during time conversion, {}, {}".format(error, values))


def _date_function(values):
@@ -825,9 +806,7 @@ def _date_function(values):
time_tuple = time.strptime(value, "%Y-%m-%d")[0:6]
except ValueError as error:
_raise_cast_error(
"Error during date conversion, {}, {}".format(
error, values
)
"Error during date conversion, {}, {}".format(error, values)
)
else:
_raise_cast_error("Invalid argument for date(), {}".format(value))
@@ -838,9 +817,7 @@ def _date_function(values):
try:
return datetime.datetime(*time_tuple)
except ValueError as error:
_raise_cast_error(
"Error during date conversion, {}, {}".format(error, values)
)
_raise_cast_error("Error during date conversion, {}, {}".format(error, values))


def _datetime_function(values):
@@ -851,14 +828,10 @@ def _datetime_function(values):
time_tuple = time.strptime(value, "%Y-%m-%d %H:%M:%S")[0:6]
except ValueError as error:
_raise_cast_error(
"Error during date conversion, {}, {}".format(
error, values
)
"Error during date conversion, {}, {}".format(error, values)
)
else:
_raise_cast_error(
"Invalid argument for datetime(), {}".format(value)
)
_raise_cast_error("Invalid argument for datetime(), {}".format(value))
else:
time_tuple = values
try:
@@ -883,9 +856,7 @@ def _key_function(values):
*values, namespace=context.get_namespace(), project=client.project
)
_raise_cast_error(
"Key requires even number of operands or single string, {}".format(
values
)
"Key requires even number of operands or single string, {}".format(values)
)

