Skip to content

Commit 6a12de2

Browse files
author
Chris Rossi
authored
style: upgrade black to latest version, use black default line length (#521)
style: upgrade black to latest version, use black default line length

* Upgrade to the latest version of black.
* Stop passing "--line-length" argument to black, to use the default, like other Google API libraries.
* Add some errors for flake8 to ignore, consistent with other Google API libraries.
1 parent c457668 commit 6a12de2

Some content is hidden

Large commits have some content hidden by default. Use the search box below for content that may be hidden.

43 files changed

+458
-1182
lines changed

packages/google-cloud-ndb/.flake8

Lines changed: 18 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,18 @@
1+
# -*- coding: utf-8 -*-
2+
#
3+
# Copyright 2020 Google LLC
4+
#
5+
# Licensed under the Apache License, Version 2.0 (the "License");
6+
# you may not use this file except in compliance with the License.
7+
# You may obtain a copy of the License at
8+
#
9+
# https://www.apache.org/licenses/LICENSE-2.0
10+
#
11+
# Unless required by applicable law or agreed to in writing, software
12+
# distributed under the License is distributed on an "AS IS" BASIS,
13+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14+
# See the License for the specific language governing permissions and
15+
# limitations under the License.
16+
17+
[flake8]
18+
ignore = E203, E266, E501, W503

packages/google-cloud-ndb/docs/conf.py

Lines changed: 1 addition & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -158,9 +158,7 @@
158158
# Grouping the document tree into LaTeX files. List of tuples
159159
# (source start file, target name, title,
160160
# author, documentclass [howto, manual, or own class]).
161-
latex_documents = [
162-
(master_doc, "ndb.tex", "ndb Documentation", "Google LLC", "manual")
163-
]
161+
latex_documents = [(master_doc, "ndb.tex", "ndb Documentation", "Google LLC", "manual")]
164162

165163

166164
# -- Options for manual page output ------------------------------------------

packages/google-cloud-ndb/google/cloud/ndb/_batch.py

Lines changed: 1 addition & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -43,11 +43,7 @@ def get_batch(batch_cls, options=None):
4343
if options is not None:
4444
options_key = tuple(
4545
sorted(
46-
(
47-
(key, value)
48-
for key, value in options.items()
49-
if value is not None
50-
)
46+
((key, value) for key, value in options.items() if value is not None)
5147
)
5248
)
5349
else:

packages/google-cloud-ndb/google/cloud/ndb/_cache.py

Lines changed: 3 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -33,8 +33,8 @@ class ContextCache(dict):
3333

3434
def get_and_validate(self, key):
3535
"""Verify that the entity's key has not changed since it was added
36-
to the cache. If it has changed, consider this a cache miss.
37-
See issue 13. http://goo.gl/jxjOP"""
36+
to the cache. If it has changed, consider this a cache miss.
37+
See issue 13. http://goo.gl/jxjOP"""
3838
entity = self[key] # May be None, meaning "doesn't exist".
3939
if entity is None or entity._key == key:
4040
return entity
@@ -58,8 +58,7 @@ def _future_result(result):
5858

5959

6060
class _GlobalCacheBatch(object):
61-
"""Abstract base for classes used to batch operations for the global cache.
62-
"""
61+
"""Abstract base for classes used to batch operations for the global cache."""
6362

6463
def full(self):
6564
"""Indicates whether more work can be added to this batch.

packages/google-cloud-ndb/google/cloud/ndb/_datastore_api.py

Lines changed: 7 additions & 21 deletions
Original file line numberDiff line numberDiff line change
@@ -132,9 +132,7 @@ def lookup(key, options):
132132
use_global_cache = context._use_global_cache(key, options)
133133

134134
if not (use_global_cache or use_datastore):
135-
raise TypeError(
136-
"use_global_cache and use_datastore can't both be False"
137-
)
135+
raise TypeError("use_global_cache and use_datastore can't both be False")
138136

139137
entity_pb = _NOT_FOUND
140138
key_locked = False
@@ -160,9 +158,7 @@ def lookup(key, options):
160158
if use_global_cache and not key_locked and entity_pb is not _NOT_FOUND:
161159
expires = context._global_cache_timeout(key, options)
162160
serialized = entity_pb.SerializeToString()
163-
yield _cache.global_compare_and_swap(
164-
cache_key, serialized, expires=expires
165-
)
161+
yield _cache.global_compare_and_swap(cache_key, serialized, expires=expires)
166162

167163
raise tasklets.Return(entity_pb)
168164

@@ -257,9 +253,7 @@ def lookup_callback(self, rpc):
257253
next_batch = _batch.get_batch(type(self), self.options)
258254
for key in results.deferred:
259255
todo_key = key.SerializeToString()
260-
next_batch.todo.setdefault(todo_key, []).extend(
261-
self.todo[todo_key]
262-
)
256+
next_batch.todo.setdefault(todo_key, []).extend(self.todo[todo_key])
263257

264258
# For all missing keys, set result to _NOT_FOUND and let callers decide
265259
# how to handle
@@ -331,9 +325,7 @@ def get_read_options(options, default_read_consistency=None):
331325
read_consistency = default_read_consistency
332326

333327
elif read_consistency is EVENTUAL:
334-
raise ValueError(
335-
"read_consistency must not be EVENTUAL when in transaction"
336-
)
328+
raise ValueError("read_consistency must not be EVENTUAL when in transaction")
337329

338330
return datastore_pb2.ReadOptions(
339331
read_consistency=read_consistency, transaction=transaction
@@ -380,9 +372,7 @@ def put(entity, options):
380372
use_global_cache = context._use_global_cache(entity.key, options)
381373
use_datastore = context._use_datastore(entity.key, options)
382374
if not (use_global_cache or use_datastore):
383-
raise TypeError(
384-
"use_global_cache and use_datastore can't both be False"
385-
)
375+
raise TypeError("use_global_cache and use_datastore can't both be False")
386376

387377
if not use_datastore and entity.key.is_partial:
388378
raise TypeError("Can't store partial keys when use_datastore is False")
@@ -990,9 +980,7 @@ def _datastore_allocate_ids(keys, retries=None, timeout=None):
990980
:class:`google.cloud.datastore_v1.datastore_pb2.AllocateIdsResponse`
991981
"""
992982
client = context_module.get_context().client
993-
request = datastore_pb2.AllocateIdsRequest(
994-
project_id=client.project, keys=keys
995-
)
983+
request = datastore_pb2.AllocateIdsRequest(project_id=client.project, keys=keys)
996984

997985
return make_call("AllocateIds", request, retries=retries, timeout=timeout)
998986

@@ -1050,9 +1038,7 @@ def _datastore_begin_transaction(read_only, retries=None, timeout=None):
10501038
project_id=client.project, transaction_options=options
10511039
)
10521040

1053-
return make_call(
1054-
"BeginTransaction", request, retries=retries, timeout=timeout
1055-
)
1041+
return make_call("BeginTransaction", request, retries=retries, timeout=timeout)
10561042

10571043

10581044
@tasklets.tasklet

packages/google-cloud-ndb/google/cloud/ndb/_datastore_query.py

Lines changed: 6 additions & 18 deletions
Original file line numberDiff line numberDiff line change
@@ -311,9 +311,7 @@ def _next_batch(self):
311311
batch.more_results == MORE_RESULTS_TYPE_NOT_FINISHED
312312
)
313313

314-
self._more_results_after_limit = (
315-
batch.more_results == MORE_RESULTS_AFTER_LIMIT
316-
)
314+
self._more_results_after_limit = batch.more_results == MORE_RESULTS_AFTER_LIMIT
317315

318316
if more_results:
319317
# Fix up query for next batch
@@ -538,9 +536,7 @@ def __init__(self, query, raw=False):
538536
self._extra_projections = extra_projections
539537

540538
queries = [
541-
query.copy(
542-
filters=node, projection=projection, offset=None, limit=None
543-
)
539+
query.copy(filters=node, projection=projection, offset=None, limit=None)
544540
for node in query.filters._nodes
545541
]
546542
self._result_sets = [iterate(_query, raw=True) for _query in queries]
@@ -625,10 +621,7 @@ def has_next_async(self):
625621
def probably_has_next(self):
626622
"""Implements :meth:`QueryIterator.probably_has_next`."""
627623
return bool(self._next_result) or any(
628-
[
629-
result_set.probably_has_next()
630-
for result_set in self._result_sets
631-
]
624+
[result_set.probably_has_next() for result_set in self._result_sets]
632625
)
633626

634627
def next(self):
@@ -774,9 +767,7 @@ def entity(self):
774767
key = key_module.Key._from_ds_key(ds_key)
775768
return key
776769

777-
raise NotImplementedError(
778-
"Got unexpected entity result type for query."
779-
)
770+
raise NotImplementedError("Got unexpected entity result type for query.")
780771

781772

782773
def _query_to_protobuf(query):
@@ -794,16 +785,13 @@ def _query_to_protobuf(query):
794785

795786
if query.projection:
796787
query_args["projection"] = [
797-
query_pb2.Projection(
798-
property=query_pb2.PropertyReference(name=name)
799-
)
788+
query_pb2.Projection(property=query_pb2.PropertyReference(name=name))
800789
for name in query.projection
801790
]
802791

803792
if query.distinct_on:
804793
query_args["distinct_on"] = [
805-
query_pb2.PropertyReference(name=name)
806-
for name in query.distinct_on
794+
query_pb2.PropertyReference(name=name) for name in query.distinct_on
807795
]
808796

809797
if query.order_by:

packages/google-cloud-ndb/google/cloud/ndb/_datastore_types.py

Lines changed: 1 addition & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -55,8 +55,7 @@ def __init__(self, blob_key):
5555
if isinstance(blob_key, bytes):
5656
if len(blob_key) > _MAX_STRING_LENGTH:
5757
raise exceptions.BadValueError(
58-
"blob key must be under {:d} "
59-
"bytes.".format(_MAX_STRING_LENGTH)
58+
"blob key must be under {:d} " "bytes.".format(_MAX_STRING_LENGTH)
6059
)
6160
elif blob_key is not None:
6261
raise exceptions.BadValueError(

packages/google-cloud-ndb/google/cloud/ndb/_eventloop.py

Lines changed: 2 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -31,9 +31,7 @@
3131

3232
log = logging.getLogger(__name__)
3333

34-
_Event = collections.namedtuple(
35-
"_Event", ("when", "callback", "args", "kwargs")
36-
)
34+
_Event = collections.namedtuple("_Event", ("when", "callback", "args", "kwargs"))
3735

3836

3937
class EventLoop(object):
@@ -314,9 +312,7 @@ def run0(self):
314312
start_time = time.time()
315313
rpc_id, rpc = self.rpc_results.get()
316314
elapsed = time.time() - start_time
317-
utils.logging_debug(
318-
log, "Blocked for {}s awaiting RPC results.", elapsed
319-
)
315+
utils.logging_debug(log, "Blocked for {}s awaiting RPC results.", elapsed)
320316
context.wait_time += elapsed
321317

322318
callback = self.rpcs.pop(rpc_id)

packages/google-cloud-ndb/google/cloud/ndb/_gql.py

Lines changed: 15 additions & 44 deletions
Original file line numberDiff line numberDiff line change
@@ -93,9 +93,7 @@ class GQL(object):
9393
_limit = -1
9494
_hint = ""
9595

96-
def __init__(
97-
self, query_string, _app=None, _auth_domain=None, namespace=None
98-
):
96+
def __init__(self, query_string, _app=None, _auth_domain=None, namespace=None):
9997
"""Parses the input query into the class as a pre-compiled query.
10098
10199
Args:
@@ -191,9 +189,7 @@ def _entity(self):
191189
_quoted_identifier_regex = re.compile(r'((?:"[^"\s]+")+)$')
192190
_conditions_regex = re.compile(r"(<=|>=|!=|=|<|>|is|in)$", re.IGNORECASE)
193191
_number_regex = re.compile(r"(\d+)$")
194-
_cast_regex = re.compile(
195-
r"(geopt|user|key|date|time|datetime)$", re.IGNORECASE
196-
)
192+
_cast_regex = re.compile(r"(geopt|user|key|date|time|datetime)$", re.IGNORECASE)
197193

198194
def _Error(self, error_message):
199195
"""Generic query error.
@@ -216,8 +212,7 @@ def _Error(self, error_message):
216212
)
217213

218214
def _Accept(self, symbol_string):
219-
"""Advance the symbol and return true if the next symbol matches input.
220-
"""
215+
"""Advance the symbol and return true if the next symbol matches input."""
221216
if self._next_symbol < len(self._symbols):
222217
if self._symbols[self._next_symbol].upper() == symbol_string:
223218
self._next_symbol += 1
@@ -335,9 +330,7 @@ def _FilterList(self):
335330

336331
if not self._AddSimpleFilter(identifier, condition, self._Reference()):
337332

338-
if not self._AddSimpleFilter(
339-
identifier, condition, self._Literal()
340-
):
333+
if not self._AddSimpleFilter(identifier, condition, self._Literal()):
341334

342335
type_cast = self._TypeCast()
343336
if not type_cast or not self._AddProcessedParameterFilter(
@@ -389,13 +382,9 @@ def _CheckFilterSyntax(self, identifier, condition):
389382
else:
390383
self._Error('"IS" expected to follow "ANCESTOR"')
391384
elif condition.lower() == "is":
392-
self._Error(
393-
'"IS" can only be used when comparing against "ANCESTOR"'
394-
)
385+
self._Error('"IS" can only be used when comparing against "ANCESTOR"')
395386

396-
def _AddProcessedParameterFilter(
397-
self, identifier, condition, operator, parameters
398-
):
387+
def _AddProcessedParameterFilter(self, identifier, condition, operator, parameters):
399388
"""Add a filter with post-processing required.
400389
401390
Args:
@@ -424,9 +413,7 @@ def _AddProcessedParameterFilter(
424413
if operator == "list" and condition.lower() != "in":
425414
self._Error("Only IN can process a list of values")
426415

427-
self._filters.setdefault(filter_rule, []).append(
428-
(operator, parameters)
429-
)
416+
self._filters.setdefault(filter_rule, []).append((operator, parameters))
430417
return True
431418

432419
def _AddSimpleFilter(self, identifier, condition, parameter):
@@ -776,9 +763,7 @@ def __repr__(self):
776763

777764
def _raise_not_implemented(func):
778765
def raise_inner(value):
779-
raise NotImplementedError(
780-
"GQL function {} is not implemented".format(func)
781-
)
766+
raise NotImplementedError("GQL function {} is not implemented".format(func))
782767

783768
return raise_inner
784769

@@ -795,9 +780,7 @@ def _time_function(values):
795780
time_tuple = time.strptime(value, "%H:%M:%S")
796781
except ValueError as error:
797782
_raise_cast_error(
798-
"Error during time conversion, {}, {}".format(
799-
error, values
800-
)
783+
"Error during time conversion, {}, {}".format(error, values)
801784
)
802785
time_tuple = time_tuple[3:]
803786
time_tuple = time_tuple[0:3]
@@ -812,9 +795,7 @@ def _time_function(values):
812795
try:
813796
return datetime.time(*time_tuple)
814797
except ValueError as error:
815-
_raise_cast_error(
816-
"Error during time conversion, {}, {}".format(error, values)
817-
)
798+
_raise_cast_error("Error during time conversion, {}, {}".format(error, values))
818799

819800

820801
def _date_function(values):
@@ -825,9 +806,7 @@ def _date_function(values):
825806
time_tuple = time.strptime(value, "%Y-%m-%d")[0:6]
826807
except ValueError as error:
827808
_raise_cast_error(
828-
"Error during date conversion, {}, {}".format(
829-
error, values
830-
)
809+
"Error during date conversion, {}, {}".format(error, values)
831810
)
832811
else:
833812
_raise_cast_error("Invalid argument for date(), {}".format(value))
@@ -838,9 +817,7 @@ def _date_function(values):
838817
try:
839818
return datetime.datetime(*time_tuple)
840819
except ValueError as error:
841-
_raise_cast_error(
842-
"Error during date conversion, {}, {}".format(error, values)
843-
)
820+
_raise_cast_error("Error during date conversion, {}, {}".format(error, values))
844821

845822

846823
def _datetime_function(values):
@@ -851,14 +828,10 @@ def _datetime_function(values):
851828
time_tuple = time.strptime(value, "%Y-%m-%d %H:%M:%S")[0:6]
852829
except ValueError as error:
853830
_raise_cast_error(
854-
"Error during date conversion, {}, {}".format(
855-
error, values
856-
)
831+
"Error during date conversion, {}, {}".format(error, values)
857832
)
858833
else:
859-
_raise_cast_error(
860-
"Invalid argument for datetime(), {}".format(value)
861-
)
834+
_raise_cast_error("Invalid argument for datetime(), {}".format(value))
862835
else:
863836
time_tuple = values
864837
try:
@@ -883,9 +856,7 @@ def _key_function(values):
883856
*values, namespace=context.get_namespace(), project=client.project
884857
)
885858
_raise_cast_error(
886-
"Key requires even number of operands or single string, {}".format(
887-
values
888-
)
859+
"Key requires even number of operands or single string, {}".format(values)
889860
)
890861

891862

0 commit comments

Comments (0)