diff --git a/gcloud/_helpers.py b/gcloud/_helpers.py index c6267888be36..a2c4289c1e59 100644 --- a/gcloud/_helpers.py +++ b/gcloud/_helpers.py @@ -26,6 +26,10 @@ from threading import local as Local from google.protobuf import timestamp_pb2 +try: + from google.appengine.api import app_identity +except ImportError: + app_identity = None import six from six.moves.http_client import HTTPConnection from six.moves import configparser @@ -33,11 +37,6 @@ from gcloud.environment_vars import PROJECT from gcloud.environment_vars import CREDENTIALS -try: - from google.appengine.api import app_identity -except ImportError: - app_identity = None - _NOW = datetime.datetime.utcnow # To be replaced by tests. _RFC3339_MICROS = '%Y-%m-%dT%H:%M:%S.%fZ' @@ -77,8 +76,9 @@ def push(self, resource): def pop(self): """Pop a resource from our stack. - :raises: IndexError if the stack is empty. + :rtype: object :returns: the top-most resource, after removing it. + :raises IndexError: if the stack is empty. """ return self._stack.pop() @@ -86,6 +86,7 @@ def pop(self): def top(self): """Get the top-most resource + :rtype: object :returns: the top-most item, or None if the stack is empty. """ if len(self._stack) > 0: @@ -141,8 +142,7 @@ def _ensure_tuple_or_list(arg_name, tuple_or_list): :rtype: list of str :returns: The ``tuple_or_list`` passed in cast to a ``list``. - :raises: class:`TypeError` if the ``tuple_or_list`` is not a tuple or - list. + :raises TypeError: if the ``tuple_or_list`` is not a tuple or list. """ if not isinstance(tuple_or_list, (tuple, list)): raise TypeError('Expected %s to be a tuple or list. ' @@ -392,6 +392,8 @@ def _rfc3339_nanos_to_datetime(dt_str): :rtype: :class:`datetime.datetime` :returns: The datetime object created from the string. + :raises ValueError: If the timestamp does not match the RFC 3339 + regular expression. """ with_nanos = _RFC3339_NANOS.match(dt_str) if with_nanos is None: @@ -439,8 +441,7 @@ def _to_bytes(value, encoding='ascii'): :rtype: str / bytes :returns: The original value converted to bytes (if unicode) or as passed in if it started out as bytes. - :raises: :class:`TypeError ` if the value - could not be converted to bytes. + :raises TypeError: if the value could not be converted to bytes. """ result = (value.encode(encoding) if isinstance(value, six.text_type) else value) @@ -460,8 +461,7 @@ def _bytes_to_unicode(value): :returns: The original value converted to unicode (if bytes) or as passed in if it started out as unicode. - :raises: :class:`ValueError` if the value could not be converted to - unicode. + :raises ValueError: if the value could not be converted to unicode. """ result = (value.decode('utf-8') if isinstance(value, six.binary_type) else value) @@ -522,9 +522,9 @@ def _name_from_project_path(path, project, template): :rtype: str :returns: Name parsed from ``path``. - :raises: :class:`ValueError` if the ``path`` is ill-formed or if - the project from the ``path`` does not agree with the - ``project`` passed in. + :raises ValueError: if the ``path`` is ill-formed or if the project from + the ``path`` does not agree with the ``project`` + passed in. """ if isinstance(template, str): template = re.compile(template) diff --git a/gcloud/bigquery/dataset.py b/gcloud/bigquery/dataset.py index 397dbe244353..a8f7e18f4631 100644 --- a/gcloud/bigquery/dataset.py +++ b/gcloud/bigquery/dataset.py @@ -432,6 +432,9 @@ def exists(self, client=None): :type client: :class:`gcloud.bigquery.client.Client` or ``NoneType`` :param client: the client to use. 
If not passed, falls back to the ``client`` stored on the current dataset. + + :rtype: bool + :returns: Boolean indicating existence of the dataset. """ client = self._require_client(client) diff --git a/gcloud/bigquery/job.py b/gcloud/bigquery/job.py index db62a7ab389f..2399b1dde05e 100644 --- a/gcloud/bigquery/job.py +++ b/gcloud/bigquery/job.py @@ -319,6 +319,9 @@ def exists(self, client=None): :type client: :class:`gcloud.bigquery.client.Client` or ``NoneType`` :param client: the client to use. If not passed, falls back to the ``client`` stored on the current dataset. + + :rtype: bool + :returns: Boolean indicating existence of the job. """ client = self._require_client(client) @@ -869,6 +872,8 @@ class _AsyncQueryConfiguration(object): _flatten_results = None _priority = None _use_query_cache = None + _use_legacy_sql = None + _udf_resources = None _write_disposition = None @@ -927,6 +932,18 @@ def __init__(self, name, query, client): https://cloud.google.com/bigquery/docs/reference/v2/jobs#configuration.query.useQueryCache """ + use_legacy_sql = _TypedProperty('use_legacy_sql', bool) + """See: + https://cloud.google.com/bigquery/docs/\ + reference/v2/jobs#configuration.query.useLegacySql + """ + + udf_resources = _TypedProperty( + 'udf_resources', list) + """See: + https://cloud.google.com/bigquery/docs/reference/v2/jobs#configuration.query.userDefinedFunctionResources + """ + write_disposition = WriteDisposition('write_disposition') """See: https://cloud.google.com/bigquery/docs/reference/v2/jobs#configuration.query.writeDisposition @@ -965,6 +982,12 @@ def _populate_config_resource(self, configuration): configuration['priority'] = self.priority if self.use_query_cache is not None: configuration['useQueryCache'] = self.use_query_cache + if self.use_legacy_sql is not None: + configuration['useLegacySql'] = self.use_legacy_sql + if self.udf_resources is not None: + configuration['userDefinedFunctionResources'] = ( + self.udf_resources + ) if self.write_disposition is not None: configuration['writeDisposition'] = self.write_disposition diff --git a/gcloud/bigquery/query.py b/gcloud/bigquery/query.py index 4dd378af9c95..b9ebec2d7218 100644 --- a/gcloud/bigquery/query.py +++ b/gcloud/bigquery/query.py @@ -34,6 +34,7 @@ class _SyncQueryConfiguration(object): _timeout_ms = None _preserve_nulls = None _use_query_cache = None + _use_legacy_sql = None class QueryResults(object): @@ -233,6 +234,12 @@ def schema(self): https://cloud.google.com/bigquery/docs/reference/v2/jobs/query#useQueryCache """ + use_legacy_sql = _TypedProperty('use_legacy_sql', bool) + """See: + https://cloud.google.com/bigquery/docs/\ + reference/v2/jobs/query#useLegacySql + """ + def _set_properties(self, api_response): """Update properties from resource in body of ``api_response`` @@ -264,6 +271,9 @@ def _build_resource(self): if self.use_query_cache is not None: resource['useQueryCache'] = self.use_query_cache + if self.use_legacy_sql is not None: + resource['useLegacySql'] = self.use_legacy_sql + if self.dry_run is not None: resource['dryRun'] = self.dry_run diff --git a/gcloud/bigquery/table.py b/gcloud/bigquery/table.py index 7bd7f818ee8d..ec6f7a45bf0c 100644 --- a/gcloud/bigquery/table.py +++ b/gcloud/bigquery/table.py @@ -461,6 +461,9 @@ def exists(self, client=None): :type client: :class:`gcloud.bigquery.client.Client` or ``NoneType`` :param client: the client to use. If not passed, falls back to the ``client`` stored on the current dataset. + + :rtype: bool + :returns: Boolean indicating existence of the table. 
""" client = self._require_client(client) diff --git a/gcloud/bigquery/test_job.py b/gcloud/bigquery/test_job.py index 64660706be2e..7647916c7795 100644 --- a/gcloud/bigquery/test_job.py +++ b/gcloud/bigquery/test_job.py @@ -1219,6 +1219,7 @@ class TestQueryJob(unittest2.TestCase, _Base): JOB_TYPE = 'query' QUERY = 'select count(*) from persons' DESTINATION_TABLE = 'destination_table' + UDF = {"resourceUri": "gs://backet/functions.js", "inlineCode": ""} def _getTargetClass(self): from gcloud.bigquery.job import QueryJob @@ -1248,6 +1249,11 @@ def _verifyBooleanResourceProperties(self, job, config): config['useQueryCache']) else: self.assertTrue(job.use_query_cache is None) + if 'useLegacySql' in config: + self.assertEqual(job.use_legacy_sql, + config['useLegacySql']) + else: + self.assertTrue(job.use_legacy_sql is None) def _verifyResourceProperties(self, job, resource): self._verifyReadonlyResourceProperties(job, resource) @@ -1310,6 +1316,7 @@ def test_ctor(self): self.assertTrue(job.flatten_results is None) self.assertTrue(job.priority is None) self.assertTrue(job.use_query_cache is None) + self.assertTrue(job.use_legacy_sql is None) self.assertTrue(job.write_disposition is None) def test_from_api_repr_missing_identity(self): @@ -1420,6 +1427,8 @@ def test_begin_w_alternate_client(self): 'flattenResults': True, 'priority': 'INTERACTIVE', 'useQueryCache': True, + 'useLegacySql': True, + 'userDefinedFunctionResources': [self.UDF], 'writeDisposition': 'WRITE_TRUNCATE', } RESOURCE['configuration']['query'] = QUERY_CONFIGURATION @@ -1439,6 +1448,8 @@ def test_begin_w_alternate_client(self): job.flatten_results = True job.priority = 'INTERACTIVE' job.use_query_cache = True + job.use_legacy_sql = True + job.udf_resources = [self.UDF] job.write_disposition = 'WRITE_TRUNCATE' job.begin(client=client2) diff --git a/gcloud/bigquery/test_query.py b/gcloud/bigquery/test_query.py index 7138e63016d5..657c963a46c7 100644 --- a/gcloud/bigquery/test_query.py +++ b/gcloud/bigquery/test_query.py @@ -136,6 +136,7 @@ def test_ctor(self): self.assertTrue(query.max_results is None) self.assertTrue(query.preserve_nulls is None) self.assertTrue(query.use_query_cache is None) + self.assertTrue(query.use_legacy_sql is None) def test_job_wo_jobid(self): client = _Client(self.PROJECT) @@ -206,6 +207,7 @@ def test_run_w_alternate_client(self): query.preserve_nulls = True query.timeout_ms = 20000 query.use_query_cache = False + query.use_legacy_sql = True query.dry_run = True query.run(client=client2) @@ -226,6 +228,7 @@ def test_run_w_alternate_client(self): 'preserveNulls': True, 'timeoutMs': 20000, 'useQueryCache': False, + 'useLegacySql': True, } self.assertEqual(req['data'], SENT) self._verifyResourceProperties(query, RESOURCE) diff --git a/gcloud/bigtable/happybase/__init__.py b/gcloud/bigtable/happybase/__init__.py index 076a28d7c4d4..edb1ebf805bb 100644 --- a/gcloud/bigtable/happybase/__init__.py +++ b/gcloud/bigtable/happybase/__init__.py @@ -21,26 +21,25 @@ ------------------------- Some concepts from HBase/Thrift do not map directly to the Cloud -Bigtable API. As a result, the following instance methods and functions -could not be implemented: +Bigtable API. 
As a result +* :meth:`Table.regions() ` + could not be implemented since tables in Cloud Bigtable do not expose + internal storage details * :meth:`Connection.enable_table() \ - ` - no - concept of enabled/disabled + ` + does nothing since Cloud Bigtable has no concept of enabled/disabled * :meth:`Connection.disable_table() \ - ` - no - concept of enabled/disabled + ` + does nothing since Cloud Bigtable has no concept of enabled/disabled * :meth:`Connection.is_table_enabled() \ ` - - no concept of enabled/disabled + always returns :data:`True` since Cloud Bigtable has no concept of + enabled/disabled * :meth:`Connection.compact_table() \ - ` - - table storage is opaque to user -* :meth:`Table.regions() ` - - tables in Cloud Bigtable do not expose internal storage details -* :meth:`Table.counter_set() \ - ` - method can't - be atomic, so we disable it + ` + does nothing since Cloud Bigtable handles table compactions automatically + and does not expose an API for it * The ``__version__`` value for the HappyBase package is :data:`None`. However, it's worth nothing this implementation was based off HappyBase 0.9. diff --git a/gcloud/bigtable/happybase/connection.py b/gcloud/bigtable/happybase/connection.py index ebea84e93998..86df4f3e78ee 100644 --- a/gcloud/bigtable/happybase/connection.py +++ b/gcloud/bigtable/happybase/connection.py @@ -49,9 +49,16 @@ _LEGACY_ARGS = frozenset(('host', 'port', 'compat', 'transport', 'protocol')) _WARN = warnings.warn +_BASE_DISABLE = 'Cloud Bigtable has no concept of enabled / disabled tables.' _DISABLE_DELETE_MSG = ('The disable argument should not be used in ' - 'delete_table(). Cloud Bigtable has no concept ' - 'of enabled / disabled tables.') + 'delete_table(). ') + _BASE_DISABLE +_ENABLE_TMPL = 'Connection.enable_table(%r) was called, but ' + _BASE_DISABLE +_DISABLE_TMPL = 'Connection.disable_table(%r) was called, but ' + _BASE_DISABLE +_IS_ENABLED_TMPL = ('Connection.is_table_enabled(%r) was called, but ' + + _BASE_DISABLE) +_COMPACT_TMPL = ('Connection.compact_table(%r, major=%r) was called, but the ' + 'Cloud Bigtable API handles table compactions automatically ' + 'and does not expose an API for it.') def _get_instance(timeout=None): @@ -71,8 +78,8 @@ def _get_instance(timeout=None): :rtype: :class:`gcloud.bigtable.instance.Instance` :returns: The unique instance owned by the project inferred from the environment. - :raises: :class:`ValueError ` if there is a failed - location or any number of instances other than one. + :raises ValueError: if there is a failed location or any number of + instances other than one. """ client_kwargs = {'admin': True} if timeout is not None: @@ -182,9 +189,8 @@ def _handle_legacy_args(arguments_dict): :type arguments_dict: dict :param arguments_dict: Unused keyword arguments. - :raises: :class:`TypeError ` if a keyword other - than ``host``, ``port``, ``compat``, ``transport`` or - ``protocol`` is used. + :raises TypeError: if a keyword other than ``host``, ``port``, + ``compat``, ``transport`` or ``protocol`` is used. """ common_args = _LEGACY_ARGS.intersection(six.iterkeys(arguments_dict)) if common_args: @@ -322,10 +328,12 @@ def create_table(self, name, families): * :class:`dict` * :class:`.GarbageCollectionRule` - :raises: :class:`TypeError ` if ``families`` is - not a dictionary, - :class:`ValueError ` if ``families`` - has no entries + :raises TypeError: If ``families`` is not a dictionary. + :raises ValueError: If ``families`` has no entries. 
+ :raises AlreadyExists: If creation fails due to an already + existing table. + :raises NetworkError: If creation fails for a reason other than + table exists. """ if not isinstance(families, dict): raise TypeError('families arg must be a dictionary') @@ -378,61 +386,70 @@ def delete_table(self, name, disable=False): name = self._table_name(name) _LowLevelTable(name, self._instance).delete() - def enable_table(self, name): + @staticmethod + def enable_table(name): """Enable the specified table. .. warning:: Cloud Bigtable has no concept of enabled / disabled tables so this - method does not work. It is provided simply for compatibility. + method does nothing. It is provided simply for compatibility. - :raises: :class:`NotImplementedError ` - always + :type name: str + :param name: The name of the table to be enabled. """ - raise NotImplementedError('The Cloud Bigtable API has no concept of ' - 'enabled or disabled tables.') + _WARN(_ENABLE_TMPL % (name,)) - def disable_table(self, name): + @staticmethod + def disable_table(name): """Disable the specified table. .. warning:: Cloud Bigtable has no concept of enabled / disabled tables so this - method does not work. It is provided simply for compatibility. + method does nothing. It is provided simply for compatibility. - :raises: :class:`NotImplementedError ` - always + :type name: str + :param name: The name of the table to be disabled. """ - raise NotImplementedError('The Cloud Bigtable API has no concept of ' - 'enabled or disabled tables.') + _WARN(_DISABLE_TMPL % (name,)) - def is_table_enabled(self, name): + @staticmethod + def is_table_enabled(name): """Return whether the specified table is enabled. .. warning:: Cloud Bigtable has no concept of enabled / disabled tables so this - method does not work. It is provided simply for compatibility. + method always returns :data:`True`. It is provided simply for + compatibility. - :raises: :class:`NotImplementedError ` - always + :type name: str + :param name: The name of the table to check enabled / disabled status. + + :rtype: bool + :returns: The value :data:`True` always. """ - raise NotImplementedError('The Cloud Bigtable API has no concept of ' - 'enabled or disabled tables.') + _WARN(_IS_ENABLED_TMPL % (name,)) + return True - def compact_table(self, name, major=False): + @staticmethod + def compact_table(name, major=False): """Compact the specified table. .. warning:: - Cloud Bigtable does not support compacting a table, so this - method does not work. It is provided simply for compatibility. + Cloud Bigtable supports table compactions, it just doesn't expose + an API for that feature, so this method does nothing. It is + provided simply for compatibility. + + :type name: str + :param name: The name of the table to compact. - :raises: :class:`NotImplementedError ` - always + :type major: bool + :param major: Whether to perform a major compaction. """ - raise NotImplementedError('The Cloud Bigtable API does not support ' - 'compacting a table.') + _WARN(_COMPACT_TMPL % (name, major)) def _parse_family_option(option): diff --git a/gcloud/bigtable/happybase/pool.py b/gcloud/bigtable/happybase/pool.py index 1ed22cdd6c84..f670065fb049 100644 --- a/gcloud/bigtable/happybase/pool.py +++ b/gcloud/bigtable/happybase/pool.py @@ -113,12 +113,13 @@ def connection(self, timeout=None): If ``timeout`` is omitted, this method waits forever for a connection to become available from the local queue. + Yields an active :class:`Connection <.happybase.connection.Connection>` + from the pool. 
+ :type timeout: int :param timeout: (Optional) Time (in seconds) to wait for a connection to open. - :rtype: :class:`Connection <.happybase.connection.Connection>` - :returns: An active connection from the pool. :raises: :class:`NoConnectionsAvailable` if no connection can be retrieved from the pool before the ``timeout`` (only if a timeout is specified). diff --git a/gcloud/bigtable/happybase/table.py b/gcloud/bigtable/happybase/table.py index e35bb8090494..275e0042bf14 100644 --- a/gcloud/bigtable/happybase/table.py +++ b/gcloud/bigtable/happybase/table.py @@ -42,6 +42,7 @@ _WARN = warnings.warn +_PACK_I64 = struct.Struct('>q').pack _UNPACK_I64 = struct.Struct('>q').unpack _SIMPLE_GC_RULES = (MaxAgeGCRule, MaxVersionsGCRule) @@ -531,9 +532,11 @@ def counter_get(self, row, column): def counter_set(self, row, column, value=0): """Set a counter column to a specific value. - This method is provided in HappyBase, but we do not provide it here - because it defeats the purpose of using atomic increment and decrement - of a counter. + .. note:: + + Be careful using this method. It can be useful for setting the + initial value of a counter, but it defeats the purpose of using + atomic increment and decrement. :type row: str :param row: Row key for the row we are setting a counter in. @@ -544,13 +547,8 @@ def counter_set(self, row, column, value=0): :type value: int :param value: Value to set the counter to. - - :raises: :class:`NotImplementedError ` - always """ - raise NotImplementedError('Table.counter_set will not be implemented. ' - 'Instead use the increment/decrement ' - 'methods along with counter_get.') + self.put(row, {column: _PACK_I64(value)}) def counter_inc(self, row, column, value=1): """Atomically increment a counter column. diff --git a/gcloud/bigtable/happybase/test_connection.py b/gcloud/bigtable/happybase/test_connection.py index 6236539db71f..f70c69eaaea5 100644 --- a/gcloud/bigtable/happybase/test_connection.py +++ b/gcloud/bigtable/happybase/test_connection.py @@ -488,37 +488,81 @@ def mock_warn(msg): self.assertEqual(warned, [MUT._DISABLE_DELETE_MSG]) def test_enable_table(self): + from gcloud._testing import _Monkey + from gcloud.bigtable.happybase import connection as MUT + instance = _Instance() # Avoid implicit environ check. connection = self._makeOne(autoconnect=False, instance=instance) name = 'table-name' - with self.assertRaises(NotImplementedError): + + warned = [] + + def mock_warn(msg): + warned.append(msg) + + with _Monkey(MUT, _WARN=mock_warn): connection.enable_table(name) + self.assertEqual(warned, [MUT._ENABLE_TMPL % (name,)]) + def test_disable_table(self): + from gcloud._testing import _Monkey + from gcloud.bigtable.happybase import connection as MUT + instance = _Instance() # Avoid implicit environ check. connection = self._makeOne(autoconnect=False, instance=instance) name = 'table-name' - with self.assertRaises(NotImplementedError): + + warned = [] + + def mock_warn(msg): + warned.append(msg) + + with _Monkey(MUT, _WARN=mock_warn): connection.disable_table(name) + self.assertEqual(warned, [MUT._DISABLE_TMPL % (name,)]) + def test_is_table_enabled(self): + from gcloud._testing import _Monkey + from gcloud.bigtable.happybase import connection as MUT + instance = _Instance() # Avoid implicit environ check. 
connection = self._makeOne(autoconnect=False, instance=instance) name = 'table-name' - with self.assertRaises(NotImplementedError): - connection.is_table_enabled(name) + + warned = [] + + def mock_warn(msg): + warned.append(msg) + + with _Monkey(MUT, _WARN=mock_warn): + result = connection.is_table_enabled(name) + + self.assertTrue(result) + self.assertEqual(warned, [MUT._IS_ENABLED_TMPL % (name,)]) def test_compact_table(self): + from gcloud._testing import _Monkey + from gcloud.bigtable.happybase import connection as MUT + instance = _Instance() # Avoid implicit environ check. connection = self._makeOne(autoconnect=False, instance=instance) name = 'table-name' - major = True - with self.assertRaises(NotImplementedError): - connection.compact_table(name, major=major) + + warned = [] + + def mock_warn(msg): + warned.append(msg) + + with _Monkey(MUT, _WARN=mock_warn): + connection.compact_table(name) + + self.assertEqual(warned, [MUT._COMPACT_TMPL % (name, False)]) class Test__parse_family_option(unittest2.TestCase): diff --git a/gcloud/bigtable/happybase/test_table.py b/gcloud/bigtable/happybase/test_table.py index 7efa1864d91d..f5734a7e2c7a 100644 --- a/gcloud/bigtable/happybase/test_table.py +++ b/gcloud/bigtable/happybase/test_table.py @@ -889,15 +889,45 @@ def _counter_inc_helper(self, row, column, value, commit_result): {tuple(column.split(':')): incremented_value}) def test_counter_set(self): + import struct + from gcloud._testing import _Monkey + from gcloud.bigtable.happybase import table as MUT + name = 'table-name' connection = None table = self._makeOne(name, connection) + batches_created = [] + + def make_batch(*args, **kwargs): + result = _MockBatch(*args, **kwargs) + batches_created.append(result) + return result row = 'row-key' column = 'fam:col1' value = 42 - with self.assertRaises(NotImplementedError): - table.counter_set(row, column, value=value) + with _Monkey(MUT, Batch=make_batch): + result = table.counter_set(row, column, value=value) + + # There is no return value. + self.assertEqual(result, None) + + # Check how the batch was created and used. + batch, = batches_created + self.assertTrue(isinstance(batch, _MockBatch)) + self.assertEqual(batch.args, (table,)) + expected_kwargs = { + 'timestamp': None, + 'batch_size': None, + 'transaction': False, + 'wal': MUT._WAL_SENTINEL, + } + self.assertEqual(batch.kwargs, expected_kwargs) + # Make sure it was a successful context manager + self.assertEqual(batch.exit_vals, [(None, None, None)]) + data = {column: struct.Struct('>q').pack(value)} + self.assertEqual(batch.put_args, [(row, data)]) + self.assertEqual(batch.delete_args, []) def test_counter_inc(self): import struct diff --git a/gcloud/connection.py b/gcloud/connection.py index b7518d020afc..1a96086a35a6 100644 --- a/gcloud/connection.py +++ b/gcloud/connection.py @@ -324,6 +324,9 @@ def api_request(self, method, path, query_params=None, initialization of the object at a later time. :raises: Exception if the response code is not 200 OK. + :rtype: dict or str + :returns: The API response payload, either as a raw string or + a dictionary if the response is valid JSON. """ url = self.build_api_url(path=path, query_params=query_params, api_base_url=api_base_url, diff --git a/gcloud/datastore/__init__.py b/gcloud/datastore/__init__.py index 6393c2ec99bb..8420de90d5d0 100644 --- a/gcloud/datastore/__init__.py +++ b/gcloud/datastore/__init__.py @@ -14,14 +14,14 @@ """Shortcut methods for getting set up with Google Cloud Datastore. 
-You'll typically use these to get started with the API: - ->>> from gcloud import datastore ->>> ->>> client = datastore.Client() ->>> key = client.key('EntityKind', 1234) ->>> entity = datastore.Entity(key) ->>> query = client.query(kind='EntityKind') +You'll typically use these to get started with the API:: + + >>> from gcloud import datastore + >>> + >>> client = datastore.Client() + >>> key = client.key('EntityKind', 1234) + >>> entity = datastore.Entity(key) + >>> query = client.query(kind='EntityKind') The main concepts with this API are: diff --git a/gcloud/datastore/client.py b/gcloud/datastore/client.py index fc8b36b46910..a83af078f4d4 100644 --- a/gcloud/datastore/client.py +++ b/gcloud/datastore/client.py @@ -366,7 +366,7 @@ def delete(self, key): :type key: :class:`gcloud.datastore.key.Key` :param key: The key to be deleted from the datastore. """ - return self.delete_multi(keys=[key]) + self.delete_multi(keys=[key]) def delete_multi(self, keys): """Delete keys from the Cloud Datastore. diff --git a/gcloud/datastore/connection.py b/gcloud/datastore/connection.py index 63a82adf19ff..de030bc04470 100644 --- a/gcloud/datastore/connection.py +++ b/gcloud/datastore/connection.py @@ -112,10 +112,13 @@ def _rpc(self, project, method, request_pb, response_pb_cls): :type request_pb: :class:`google.protobuf.message.Message` instance :param request_pb: the protobuf instance representing the request. - :type response_pb_cls: A :class:`google.protobuf.message.Message' + :type response_pb_cls: A :class:`google.protobuf.message.Message` subclass. :param response_pb_cls: The class used to unmarshall the response protobuf. + + :rtype: :class:`google.protobuf.message.Message` + :returns: The RPC message parsed from the response. """ response = self._request(project=project, method=method, data=request_pb.SerializeToString()) @@ -142,6 +145,9 @@ def build_api_url(self, project, method, base_url=None, :type api_version: string :param api_version: The version of the API to connect to. You shouldn't have to provide this. + + :rtype: str + :returns: The API URL created. """ return self.API_URL_TEMPLATE.format( api_base=(base_url or self.api_base_url), @@ -322,9 +328,9 @@ def commit(self, project, request, transaction_id): This method will mutate ``request`` before using it. :rtype: tuple - :returns': The pair of the number of index updates and a list of - :class:`._generated.entity_pb2.Key` for each incomplete key - that was completed in the commit. + :returns: The pair of the number of index updates and a list of + :class:`._generated.entity_pb2.Key` for each incomplete key + that was completed in the commit. """ if transaction_id: request.mode = _datastore_pb2.CommitRequest.TRANSACTIONAL @@ -415,9 +421,9 @@ def _parse_commit_response(commit_response_pb): :param commit_response_pb: The protobuf response from a commit request. :rtype: tuple - :returns': The pair of the number of index updates and a list of - :class:`._generated.entity_pb2.Key` for each incomplete key - that was completed in the commit. + :returns: The pair of the number of index updates and a list of + :class:`._generated.entity_pb2.Key` for each incomplete key + that was completed in the commit. 
""" mut_results = commit_response_pb.mutation_results index_updates = commit_response_pb.index_updates diff --git a/gcloud/datastore/entity.py b/gcloud/datastore/entity.py index 0d5ce4b18bcb..7021d3d7e7f5 100644 --- a/gcloud/datastore/entity.py +++ b/gcloud/datastore/entity.py @@ -131,6 +131,7 @@ def exclude_from_indexes(self): """Names of fields which are *not* to be indexed for this entity. :rtype: sequence of field names + :returns: The set of fields excluded from indexes. """ return frozenset(self._exclude_from_indexes) diff --git a/gcloud/datastore/helpers.py b/gcloud/datastore/helpers.py index 6b0ff82772ab..e13f7a51b039 100644 --- a/gcloud/datastore/helpers.py +++ b/gcloud/datastore/helpers.py @@ -296,6 +296,7 @@ def _pb_attr_value(val): bool, float, integer, string :param val: The value to be scrutinized. + :rtype: tuple :returns: A tuple of the attribute name and proper value type. """ @@ -341,6 +342,7 @@ def _get_value_from_value_pb(value_pb): :type value_pb: :class:`gcloud.datastore._generated.entity_pb2.Value` :param value_pb: The Value Protobuf. + :rtype: object :returns: The value provided by the Protobuf. :raises: :class:`ValueError ` if no value type has been set. diff --git a/gcloud/datastore/query.py b/gcloud/datastore/query.py index a6ce955cee38..2b2a7928221a 100644 --- a/gcloud/datastore/query.py +++ b/gcloud/datastore/query.py @@ -101,6 +101,7 @@ def project(self): """Get the project for this Query. :rtype: str + :returns: The project for the query. """ return self._project or self._client.project @@ -128,6 +129,7 @@ def kind(self): """Get the Kind of the Query. :rtype: string + :returns: The kind for the query. """ return self._kind @@ -155,6 +157,7 @@ def ancestor(self): """The ancestor key for the query. :rtype: Key or None + :returns: The ancestor for the query. """ return self._ancestor @@ -179,6 +182,7 @@ def filters(self): """Filters set on the query. :rtype: sequence of (property_name, operator, value) tuples. + :returns: The filters set on the query. """ return self._filters[:] @@ -267,6 +271,7 @@ def order(self): """Names of fields used to sort query results. :rtype: sequence of string + :returns: The order(s) set on the query. """ return self._order[:] @@ -291,6 +296,7 @@ def distinct_on(self): """Names of fields used to group query results. :rtype: sequence of string + :returns: The "distinct on" fields set on the query. """ return self._distinct_on[:] @@ -338,6 +344,7 @@ def fetch(self, limit=None, offset=0, start_cursor=None, end_cursor=None, If not supplied, uses the query's value. :rtype: :class:`Iterator` + :returns: The iterator for the query. :raises: ValueError if ``connection`` is not passed and no implicit default has been set. """ @@ -400,6 +407,7 @@ def next_page(self): to iterate on the current Iterator. :rtype: tuple, (entities, more_results, cursor) + :returns: The next page of results. """ pb = _pb_from_query(self._query) diff --git a/gcloud/datastore/transaction.py b/gcloud/datastore/transaction.py index dc78c7ba99f9..c3ee420a1d36 100644 --- a/gcloud/datastore/transaction.py +++ b/gcloud/datastore/transaction.py @@ -112,6 +112,7 @@ def current(self): returns None. :rtype: :class:`gcloud.datastore.transaction.Transaction` or None + :returns: The current transaction (if any are active). 
""" top = super(Transaction, self).current() if isinstance(top, Transaction): diff --git a/gcloud/dns/changes.py b/gcloud/dns/changes.py index e3e05e723397..58ab3bfe8bf2 100644 --- a/gcloud/dns/changes.py +++ b/gcloud/dns/changes.py @@ -229,6 +229,9 @@ def exists(self, client=None): :type client: :class:`gcloud.dns.client.Client` or ``NoneType`` :param client: the client to use. If not passed, falls back to the ``client`` stored on the current zone. + + :rtype: bool + :returns: Boolean indicating existence of the changes. """ client = self._require_client(client) try: diff --git a/gcloud/dns/zone.py b/gcloud/dns/zone.py index 2a7df03da6ea..d6e9c569b3db 100644 --- a/gcloud/dns/zone.py +++ b/gcloud/dns/zone.py @@ -267,6 +267,9 @@ def exists(self, client=None): :type client: :class:`gcloud.dns.client.Client` or ``NoneType`` :param client: the client to use. If not passed, falls back to the ``client`` stored on the current zone. + + :rtype: bool + :returns: Boolean indicating existence of the managed zone. """ client = self._require_client(client) diff --git a/gcloud/iterator.py b/gcloud/iterator.py index f62d28578ea5..14fdd905f300 100644 --- a/gcloud/iterator.py +++ b/gcloud/iterator.py @@ -136,9 +136,6 @@ def get_items_from_response(self, response): :type response: dict :param response: The response of asking for the next page of items. - - :rtype: iterable - :returns: Items that the iterator should yield. """ raise NotImplementedError diff --git a/gcloud/logging/_gax.py b/gcloud/logging/_gax.py index 3f34645f3588..5b70e2abf61a 100644 --- a/gcloud/logging/_gax.py +++ b/gcloud/logging/_gax.py @@ -201,6 +201,10 @@ def sink_get(self, project, sink_name): :type sink_name: string :param sink_name: the name of the sink + + :rtype: dict + :returns: The sink object returned from the API (converted from a + protobuf to a dictionary). """ options = None path = 'projects/%s/sinks/%s' % (project, sink_name) @@ -228,6 +232,10 @@ def sink_update(self, project, sink_name, filter_, destination): :type destination: string :param destination: destination URI for the entries exported by the sink. + + :rtype: dict + :returns: The sink object returned from the API (converted from a + protobuf to a dictionary). """ options = None path = 'projects/%s/sinks/%s' % (project, sink_name) @@ -336,6 +344,10 @@ def metric_get(self, project, metric_name): :type metric_name: string :param metric_name: the name of the metric + + :rtype: dict + :returns: The metric object returned from the API (converted from a + protobuf to a dictionary). """ options = None path = 'projects/%s/metrics/%s' % (project, metric_name) @@ -362,6 +374,10 @@ def metric_update(self, project, metric_name, filter_, description): :type description: string :param description: description of the metric. + + :rtype: dict + :returns: The metric object returned from the API (converted from a + protobuf to a dictionary). """ options = None path = 'projects/%s/metrics/%s' % (project, metric_name) diff --git a/gcloud/logging/connection.py b/gcloud/logging/connection.py index fa60e181b3fd..6cad3f5ed03f 100644 --- a/gcloud/logging/connection.py +++ b/gcloud/logging/connection.py @@ -253,6 +253,9 @@ def sink_get(self, project, sink_name): :type sink_name: string :param sink_name: the name of the sink + + :rtype: dict + :returns: The JSON sink object returned from the API. 
""" target = '/projects/%s/sinks/%s' % (project, sink_name) return self._connection.api_request(method='GET', path=target) @@ -388,6 +391,9 @@ def metric_get(self, project, metric_name): :type metric_name: string :param metric_name: the name of the metric + + :rtype: dict + :returns: The JSON metric object returned from the API. """ target = '/projects/%s/metrics/%s' % (project, metric_name) return self._connection.api_request(method='GET', path=target) diff --git a/gcloud/logging/logger.py b/gcloud/logging/logger.py index ad2d4b70ca77..0c781c73c9d4 100644 --- a/gcloud/logging/logger.py +++ b/gcloud/logging/logger.py @@ -118,6 +118,9 @@ def _make_entry_resource(self, text=None, info=None, message=None, :type http_request: dict or :class:`NoneType` :param http_request: (optional) info about HTTP request associated with the entry + + :rtype: dict + :returns: The JSON resource created. """ resource = { 'logName': self.full_name, diff --git a/gcloud/logging/metric.py b/gcloud/logging/metric.py index b05269e39ac5..b3a1f282a5fb 100644 --- a/gcloud/logging/metric.py +++ b/gcloud/logging/metric.py @@ -121,6 +121,9 @@ def exists(self, client=None): :type client: :class:`gcloud.logging.client.Client` or ``NoneType`` :param client: the client to use. If not passed, falls back to the ``client`` stored on the current metric. + + :rtype: bool + :returns: Boolean indicating existence of the metric. """ client = self._require_client(client) diff --git a/gcloud/logging/sink.py b/gcloud/logging/sink.py index 07a6dba2a0d0..9cde122d5851 100644 --- a/gcloud/logging/sink.py +++ b/gcloud/logging/sink.py @@ -125,6 +125,9 @@ def exists(self, client=None): :type client: :class:`gcloud.logging.client.Client` or ``NoneType`` :param client: the client to use. If not passed, falls back to the ``client`` stored on the current sink. + + :rtype: bool + :returns: Boolean indicating existence of the sink. """ client = self._require_client(client) diff --git a/gcloud/logging/test__gax.py b/gcloud/logging/test__gax.py index d68082109cde..a47ad12f0357 100644 --- a/gcloud/logging/test__gax.py +++ b/gcloud/logging/test__gax.py @@ -898,7 +898,8 @@ class _DummyException(AbortionError): code = status_code def __init__(self): - pass + super(_DummyException, self).__init__( + None, None, self.code, None) return _DummyException() diff --git a/gcloud/monitoring/client.py b/gcloud/monitoring/client.py index 25e729c5fc9c..efe47c23e538 100644 --- a/gcloud/monitoring/client.py +++ b/gcloud/monitoring/client.py @@ -180,6 +180,9 @@ def metric_descriptor(self, type_, :type display_name: string :param display_name: An optional concise name for the metric. + + :rtype: :class:`MetricDescriptor` + :returns: The metric descriptor created with the passed-in arguments. """ return MetricDescriptor( self, type_, diff --git a/gcloud/monitoring/query.py b/gcloud/monitoring/query.py index 44e7f917de87..372959f53e9f 100644 --- a/gcloud/monitoring/query.py +++ b/gcloud/monitoring/query.py @@ -409,6 +409,10 @@ def reduce(self, cross_series_reducer, *group_by_fields): def iter(self, headers_only=False, page_size=None): """Yield all time series objects selected by the query. + The generator returned iterates over + :class:`~gcloud.monitoring.timeseries.TimeSeries` objects + containing points ordered from oldest to newest. + Note that the :class:`Query` object itself is an iterable, such that the following are equivalent:: @@ -428,9 +432,6 @@ def iter(self, headers_only=False, page_size=None): points to return per page. 
This can be used to control how far the iterator reads ahead. - :rtype: iterator over :class:`~gcloud.monitoring.timeseries.TimeSeries` - :returns: Time series objects, containing points ordered from oldest - to newest. :raises: :exc:`ValueError` if the query time interval has not been specified. """ @@ -481,8 +482,8 @@ def _build_query_params(self, headers_only=False, page_size=None, page_token=None): """Yield key-value pairs for the URL query string. - We use a series of key-value pairs instead of a ``dict`` to allow for - repeated fields. + We use a series of key-value pairs (suitable for passing to + ``urlencode``) instead of a ``dict`` to allow for repeated fields. :type headers_only: boolean :param headers_only: @@ -494,10 +495,6 @@ def _build_query_params(self, headers_only=False, :type page_token: string or None :param page_token: A token to continue the retrieval. - - :rtype: iterator over tuples - :returns: - Key-value pairs suitable for passing to ``urlencode``. """ yield 'filter', self.filter diff --git a/gcloud/pubsub/_gax.py b/gcloud/pubsub/_gax.py index 28ac6c23e294..9f9e8ef7eee6 100644 --- a/gcloud/pubsub/_gax.py +++ b/gcloud/pubsub/_gax.py @@ -133,9 +133,6 @@ def topic_delete(self, topic_path): :type topic_path: string :param topic_path: fully-qualified path of the new topic, in format ``projects//topics/``. - - :rtype: dict - :returns: ``Topic`` resource returned from the API. """ try: self._gax_api.delete_topic(topic_path) diff --git a/gcloud/pubsub/client.py b/gcloud/pubsub/client.py index 24c51f4697c2..bb610e3fd68e 100644 --- a/gcloud/pubsub/client.py +++ b/gcloud/pubsub/client.py @@ -24,6 +24,7 @@ from gcloud.pubsub.subscription import Subscription from gcloud.pubsub.topic import Topic +# pylint: disable=ungrouped-imports try: from google.pubsub.v1.publisher_api import ( PublisherApi as GeneratedPublisherAPI) @@ -37,6 +38,7 @@ GeneratedSubscriberAPI = GAXSubscriberAPI = None else: _HAVE_GAX = True +# pylint: enable=ungrouped-imports _USE_GAX = _HAVE_GAX and (os.environ.get('GCLOUD_ENABLE_GAX') is not None) diff --git a/gcloud/pubsub/message.py b/gcloud/pubsub/message.py index b309950a88d5..3ed97a33e095 100644 --- a/gcloud/pubsub/message.py +++ b/gcloud/pubsub/message.py @@ -81,6 +81,9 @@ def from_api_repr(cls, api_repr): :type api_repr: dict or None :param api_repr: The API representation of the message + + :rtype: :class:`Message` + :returns: The message created from the response. """ data = base64.b64decode(api_repr.get('data', b'')) instance = cls( diff --git a/gcloud/pubsub/subscription.py b/gcloud/pubsub/subscription.py index e2050fb06211..83493d529295 100644 --- a/gcloud/pubsub/subscription.py +++ b/gcloud/pubsub/subscription.py @@ -193,6 +193,9 @@ def exists(self, client=None): :type client: :class:`gcloud.pubsub.client.Client` or ``NoneType`` :param client: the client to use. If not passed, falls back to the ``client`` stored on the current subscription's topic. + + :rtype: bool + :returns: Boolean indicating existence of the subscription. 
""" client = self._require_client(client) api = client.subscriber_api diff --git a/gcloud/pubsub/test__gax.py b/gcloud/pubsub/test__gax.py index 2426d2dfb7e8..70e4249d7e47 100644 --- a/gcloud/pubsub/test__gax.py +++ b/gcloud/pubsub/test__gax.py @@ -749,7 +749,8 @@ class _DummyException(AbortionError): code = status_code def __init__(self): - pass + super(_DummyException, self).__init__( + None, None, self.code, None) return _DummyException() diff --git a/gcloud/pubsub/topic.py b/gcloud/pubsub/topic.py index 568434789ac4..c0706649c735 100644 --- a/gcloud/pubsub/topic.py +++ b/gcloud/pubsub/topic.py @@ -82,6 +82,9 @@ def subscription(self, name, ack_deadline=None, push_endpoint=None): :param push_endpoint: URL to which messages will be pushed by the back-end. If not set, the application must pull messages. + + :rtype: :class:`Subscription` + :returns: The subscription created with the passed in arguments. """ return Subscription(name, self, ack_deadline=ack_deadline, push_endpoint=push_endpoint) @@ -165,6 +168,9 @@ def exists(self, client=None): :type client: :class:`gcloud.pubsub.client.Client` or ``NoneType`` :param client: the client to use. If not passed, falls back to the ``client`` stored on the current topic. + + :rtype: bool + :returns: Boolean indicating existence of the topic. """ client = self._require_client(client) api = client.publisher_api diff --git a/gcloud/resource_manager/project.py b/gcloud/resource_manager/project.py index bf3c0e01d18b..4624d8a27241 100644 --- a/gcloud/resource_manager/project.py +++ b/gcloud/resource_manager/project.py @@ -73,6 +73,7 @@ def from_api_repr(cls, resource, client): :param client: The Client used with this project. :rtype: :class:`gcloud.resource_manager.project.Project` + :returns: The project created. """ project = cls(project_id=resource['projectId'], client=client) project.set_properties_from_api_repr(resource) @@ -173,6 +174,9 @@ def exists(self, client=None): :data:`NoneType ` :param client: the client to use. If not passed, falls back to the client stored on the current project. + + :rtype: bool + :returns: Boolean indicating existence of the project. """ client = self._require_client(client) diff --git a/gcloud/storage/_helpers.py b/gcloud/storage/_helpers.py index e55fcf179a5f..538b5969f5b2 100644 --- a/gcloud/storage/_helpers.py +++ b/gcloud/storage/_helpers.py @@ -166,6 +166,9 @@ def _base64_md5hash(buffer_object): :type buffer_object: bytes buffer :param buffer_object: Buffer containing bytes used to compute an MD5 hash (as base64). + + :rtype: str + :returns: A base64 encoded digest of the MD5 hash. """ hash_obj = md5() _write_buffer_to_hash(buffer_object, hash_obj) diff --git a/gcloud/storage/batch.py b/gcloud/storage/batch.py index 92aed69d3d0a..569ea3e5f2de 100644 --- a/gcloud/storage/batch.py +++ b/gcloud/storage/batch.py @@ -303,9 +303,6 @@ def _unpack_batch_response(response, content): :type content: str :param content: Response payload with a batch response. - - :rtype: generator - :returns: A generator of header, payload pairs. """ parser = Parser() message = _generate_faux_mime_message(parser, response, content) diff --git a/gcloud/storage/bucket.py b/gcloud/storage/bucket.py index d4e82b61bdb5..1d79ae794ce8 100644 --- a/gcloud/storage/bucket.py +++ b/gcloud/storage/bucket.py @@ -161,9 +161,6 @@ def create(self, client=None): :type client: :class:`gcloud.storage.client.Client` or ``NoneType`` :param client: Optional. The client to use. If not passed, falls back to the ``client`` stored on the current bucket. 
- - :rtype: :class:`gcloud.storage.bucket.Bucket` - :returns: The newly created bucket. """ client = self._require_client(client) query_params = {'project': client.project} @@ -558,14 +555,6 @@ def lifecycle_rules(self): @lifecycle_rules.setter def lifecycle_rules(self, rules): - """Update the lifecycle rules configured for this bucket. - - See: https://cloud.google.com/storage/docs/lifecycle and - https://cloud.google.com/storage/docs/json_api/v1/buckets - - :rtype: list(dict) - :returns: A sequence of mappings describing each lifecycle rule. - """ self._patch_property('lifecycle', {'rule': rules}) location = _scalar_property('location') diff --git a/gcloud/streaming/buffered_stream.py b/gcloud/streaming/buffered_stream.py index bf5dc66550d4..02f1d5888d40 100644 --- a/gcloud/streaming/buffered_stream.py +++ b/gcloud/streaming/buffered_stream.py @@ -42,6 +42,7 @@ def stream_exhausted(self): """Does the stream have bytes remaining beyond the buffer :rtype: boolean + :returns: Boolean indicating if the stream is exhausted. """ return self._stream_at_end @@ -50,6 +51,7 @@ def stream_end_position(self): """Point to which stream was read into the buffer :rtype: integer + :returns: The end-position of the stream. """ return self._end_pos @@ -58,6 +60,7 @@ def _bytes_remaining(self): """Bytes remaining to be read from the buffer :rtype: integer + :returns: The number of bytes remaining. """ return len(self._buffered_data) - self._buffer_pos @@ -66,6 +69,9 @@ def read(self, size=None): :type size: integer or None :param size: How many bytes to read (defaults to all remaining bytes). + + :rtype: str + :returns: The data read from the stream. """ if size is None or size < 0: raise ValueError( diff --git a/gcloud/streaming/exceptions.py b/gcloud/streaming/exceptions.py index 1b3a4f43286a..4ff4b9d44ded 100644 --- a/gcloud/streaming/exceptions.py +++ b/gcloud/streaming/exceptions.py @@ -49,6 +49,7 @@ def from_response(cls, http_response): :param http_response: the response which returned the error :rtype: :class:`HttpError` + :returns: The error created from the response. """ return cls(http_response.info, http_response.content, http_response.request_url) @@ -97,6 +98,7 @@ def from_response(cls, http_response): :param http_response: the response which returned the error :rtype: :class:`RetryAfterError` + :returns: The error created from the response. """ return cls(http_response.info, http_response.content, http_response.request_url, http_response.retry_after) diff --git a/gcloud/streaming/http_wrapper.py b/gcloud/streaming/http_wrapper.py index d6200694ab8a..6f4db884e343 100644 --- a/gcloud/streaming/http_wrapper.py +++ b/gcloud/streaming/http_wrapper.py @@ -140,6 +140,7 @@ def loggable_body(self): """Request body for logging purposes :rtype: str + :returns: The body to be logged. """ return self.__loggable_body @@ -162,6 +163,7 @@ def body(self): """Request body :rtype: str + :returns: The body of the request. """ return self.__body @@ -226,6 +228,7 @@ def length(self): for responses larger than ``sys.maxint``. :rtype: integer or long + :returns: The length of the response. """ if 'content-encoding' in self.info and 'content-range' in self.info: # httplib2 rewrites content-length in the case of a compressed @@ -243,6 +246,7 @@ def status_code(self): """HTTP status code :rtype: integer + :returns: The response status code. """ return int(self.info['status']) @@ -438,6 +442,7 @@ def get_http(**kwds): :param kwds: keyword arguments to pass to factories. 
:rtype: :class:`httplib2.Http` (or a workalike) + :returns: The HTTP object created. """ for factory in _HTTP_FACTORIES: http = factory(**kwds) diff --git a/gcloud/streaming/stream_slice.py b/gcloud/streaming/stream_slice.py index ada3c66e2169..52bcd1295a30 100644 --- a/gcloud/streaming/stream_slice.py +++ b/gcloud/streaming/stream_slice.py @@ -38,6 +38,7 @@ def length(self): For 32-bit python2.x, len() cannot exceed a 32-bit number. :rtype: integer + :returns: The max "length" of the stream. """ return self._max_bytes diff --git a/gcloud/streaming/test_transfer.py b/gcloud/streaming/test_transfer.py index dfaa00be70cb..8ba3d02e227f 100644 --- a/gcloud/streaming/test_transfer.py +++ b/gcloud/streaming/test_transfer.py @@ -1429,7 +1429,7 @@ def test_stream_file_already_complete_w_seekable_stream_unsynced(self): with self.assertRaises(CommunicationError): upload.stream_file() - def test_stream_file_already_complete_w_seekable_stream_synced(self): + def test_stream_file_already_complete_wo_seekable_method_synced(self): import os from gcloud.streaming.transfer import RESUMABLE_UPLOAD CONTENT = b'ABCDEFGHIJ' @@ -1445,6 +1445,38 @@ def test_stream_file_already_complete_w_seekable_stream_synced(self): upload._complete = True self.assertTrue(upload.stream_file(use_chunks=False) is response) + def test_stream_file_already_complete_w_seekable_method_true_synced(self): + import os + from gcloud.streaming.transfer import RESUMABLE_UPLOAD + CONTENT = b'ABCDEFGHIJ' + http = object() + stream = _StreamWithSeekableMethod(CONTENT, True) + stream.seek(0, os.SEEK_END) + response = object() + upload = self._makeOne(stream, chunksize=1024) + upload.strategy = RESUMABLE_UPLOAD + upload._server_chunk_granularity = 128 + upload._initialize(http, _Request.URL) + upload._final_response = response + upload._complete = True + self.assertTrue(upload.stream_file(use_chunks=False) is response) + + def test_stream_file_already_complete_w_seekable_method_false(self): + import os + from gcloud.streaming.transfer import RESUMABLE_UPLOAD + CONTENT = b'ABCDEFGHIJ' + http = object() + stream = _StreamWithSeekableMethod(CONTENT, False) + stream.seek(0, os.SEEK_END) + response = object() + upload = self._makeOne(stream, chunksize=1024) + upload.strategy = RESUMABLE_UPLOAD + upload._server_chunk_granularity = 128 + upload._initialize(http, _Request.URL) + upload._final_response = response + upload._complete = True + self.assertTrue(upload.stream_file(use_chunks=False) is response) + def test_stream_file_incomplete(self): from six.moves import http_client from gcloud._testing import _Monkey @@ -1835,6 +1867,16 @@ def close(self): self._closed = True +class _StreamWithSeekableMethod(_Stream): + + def __init__(self, to_read=b'', seekable=True): + super(_StreamWithSeekableMethod, self).__init__(to_read) + self._seekable = seekable + + def seekable(self): + return self._seekable + + class _Request(object): __slots__ = ('url', 'http_method', 'body', 'headers', 'loggable_body') URL = 'http://example.com/api' diff --git a/gcloud/streaming/transfer.py b/gcloud/streaming/transfer.py index 7ef439d67f36..ac02951dd18e 100644 --- a/gcloud/streaming/transfer.py +++ b/gcloud/streaming/transfer.py @@ -76,9 +76,10 @@ def __repr__(self): @property def close_stream(self): - """Should this instance close the stream when deleted + """Should this instance close the stream when deleted. :rtype: boolean + :returns: Boolean indicated if the stream should be closed. 
""" return self._close_stream @@ -87,6 +88,7 @@ def http(self): """Http instance used to perform requests. :rtype: :class:`httplib2.Http` (or workalike) + :returns: The HTTP object used for requests. """ return self._http @@ -97,6 +99,7 @@ def bytes_http(self): Defaults to :attr:`http`. :rtype: :class:`httplib2.Http` (or workalike) + :returns: The HTTP object used for binary requests. """ return self._bytes_http or self.http @@ -114,6 +117,7 @@ def num_retries(self): """How many retries should the transfer attempt :rtype: integer + :returns: The number of retries allowed. """ return self._num_retries @@ -136,6 +140,7 @@ def stream(self): """Stream to/from which data is downloaded/uploaded. :rtype: file-like object + :returns: The stream that sends/receives data. """ return self._stream @@ -144,6 +149,7 @@ def url(self): """URL to / from which data is downloaded/uploaded. :rtype: string + :returns: The URL where data is sent/received. """ return self._url @@ -170,6 +176,8 @@ def initialized(self): """Has the instance been initialized :rtype: boolean + :returns: Boolean indicating if the current transfer + has been initialized. """ return self.url is not None and self.http is not None @@ -239,6 +247,9 @@ def from_file(cls, filename, overwrite=False, auto_transfer=True, **kwds): :type kwds: dict :param kwds: keyword arguments: passed through to :meth:`_Transfer.__init__()`. + + :rtype: :class:`Download` + :returns: The download initiated from the file passed. """ path = os.path.expanduser(filename) if os.path.exists(path) and not overwrite: @@ -263,6 +274,9 @@ def from_stream(cls, stream, auto_transfer=True, total_size=None, **kwds): :type kwds: dict :param kwds: keyword arguments: passed through to :meth:`_Transfer.__init__()`. + + :rtype: :class:`Download` + :returns: The download initiated from the stream passed. """ return cls(stream, auto_transfer=auto_transfer, total_size=total_size, **kwds) @@ -272,6 +286,7 @@ def progress(self): """Number of bytes have been downloaded. :rtype: integer >= 0 + :returns: The number of downloaded bytes. """ return self._progress @@ -280,6 +295,7 @@ def total_size(self): """Total number of bytes to be downloaded. :rtype: integer or None + :returns: The total number of bytes to download. """ return self._total_size @@ -288,6 +304,7 @@ def encoding(self): """'Content-Encoding' used to transfer the file :rtype: string or None + :returns: The encoding of the downloaded content. """ return self._encoding @@ -431,6 +448,7 @@ def _compute_end_byte(self, start, end=None, use_chunks=True): :type use_chunks: boolean :param use_chunks: If False, ignore :attr:`chunksize`. + :rtype: str :returns: Last byte to use in a 'Range' header, or None. """ end_byte = end @@ -642,6 +660,9 @@ def from_file(cls, filename, mime_type=None, auto_transfer=True, **kwds): :type kwds: dict :param kwds: keyword arguments: passed through to :meth:`_Transfer.__init__()`. + + :rtype: :class:`Upload` + :returns: The upload initiated from the file passed. """ path = os.path.expanduser(filename) if not mime_type: @@ -673,6 +694,9 @@ def from_stream(cls, stream, mime_type, :type kwds: dict :param kwds: keyword arguments: passed through to :meth:`_Transfer.__init__()`. + + :rtype: :class:`Upload` + :returns: The upload initiated from the stream passed. """ if mime_type is None: raise ValueError( @@ -685,6 +709,7 @@ def complete(self): """Has the entire stream been uploaded. :rtype: boolean + :returns: Boolean indicated if the upload is complete. 
""" return self._complete @@ -693,6 +718,7 @@ def mime_type(self): """MIMEtype of the file being uploaded. :rtype: string + :returns: The mime-type of the upload. """ return self._mime_type @@ -701,6 +727,7 @@ def progress(self): """Bytes uploaded so far :rtype: integer + :returns: The amount uploaded so far. """ return self._progress @@ -709,6 +736,7 @@ def strategy(self): """Upload strategy to use :rtype: string or None + :returns: The strategy used to upload the data. """ return self._strategy @@ -733,6 +761,7 @@ def total_size(self): """Total size of the stream to be uploaded. :rtype: integer or None + :returns: The total size to be uploaded. """ return self._total_size @@ -929,6 +958,7 @@ def _get_range_header(response): :param response: response to be queried :rtype: string + :returns: The header used to determine the bytes range. """ # NOTE: Per RFC 2616[1]/7233[2][3], 'Range' is a request header, # not a response header. If the back-end is actually setting @@ -956,6 +986,9 @@ def initialize_upload(self, http_request, http): :raises: :exc:`ValueError` if the instance has not been configured with a strategy. + :rtype: :class:`~gcloud.streaming.http_wrapper.Response` + :returns: The response if the upload is resumable and auto transfer + is not used. """ if self.strategy is None: raise ValueError( @@ -988,6 +1021,9 @@ def _last_byte(range_header): :type range_header: string :param range_header: 'Range' header value per RFC 2616/7233 + + :rtype: int + :returns: The last byte from a range header. """ _, _, end = range_header.partition('-') return int(end) @@ -1016,7 +1052,10 @@ def stream_file(self, use_chunks=True): :type use_chunks: boolean :param use_chunks: If False, send the stream in a single request. - Otherwise, send it in chunks. + Otherwise, send it in chunks. + + :rtype: :class:`gcloud.streaming.http_wrapper.Response` + :returns: The response for the final request made. """ if self.strategy != RESUMABLE_UPLOAD: raise ValueError( @@ -1082,12 +1121,15 @@ def _send_media_request(self, request, end): return response def _send_media_body(self, start): - """ Send the entire stream in a single request. + """Send the entire stream in a single request. Helper for :meth:`stream_file`: :type start: integer :param start: start byte of the range. + + :rtype: :class:`gcloud.streaming.http_wrapper.Response` + :returns: The response from the media upload request. """ self._ensure_initialized() if self.total_size is None: @@ -1115,6 +1157,9 @@ def _send_chunk(self, start): :type start: integer :param start: start byte of the range. + + :rtype: :class:`gcloud.streaming.http_wrapper.Response` + :returns: The response from the chunked upload request. """ self._ensure_initialized() no_log_body = self.total_size is None diff --git a/scripts/pylintrc_default b/scripts/pylintrc_default index 413ccd72ba41..df64b2784a50 100644 --- a/scripts/pylintrc_default +++ b/scripts/pylintrc_default @@ -92,11 +92,7 @@ load-plugins=pylint.extensions.check_docs # will be detected by our 100% code coverage. # # New opinions in pylint 1.6, enforcing PEP 257. 
#1968 for eventual fixes -# - catching-non-exception # - missing-raises-doc -# - missing-returns-doc -# - redundant-returns-doc -# - ungrouped-imports disable = maybe-no-member, no-member, @@ -106,11 +102,7 @@ disable = redefined-variable-type, wrong-import-position, no-name-in-module, - catching-non-exception, missing-raises-doc, - missing-returns-doc, - redundant-returns-doc, - ungrouped-imports [REPORTS] diff --git a/scripts/run_pylint.py b/scripts/run_pylint.py index 52449e352828..11b36f90571b 100644 --- a/scripts/run_pylint.py +++ b/scripts/run_pylint.py @@ -37,10 +37,6 @@ ] IGNORED_FILES = [ os.path.join('docs', 'conf.py'), - # Both these files cause pylint 1.6 to barf. See: - # https://github.com/PyCQA/pylint/issues/998 - os.path.join('gcloud', 'bigtable', 'happybase', 'connection.py'), - os.path.join('gcloud', 'streaming', 'http_wrapper.py'), 'setup.py', ] SCRIPTS_DIR = os.path.abspath(os.path.dirname(__file__)) @@ -70,7 +66,7 @@ } TEST_RC_REPLACEMENTS = { 'FORMAT': { - 'max-module-lines': 1900, + 'max-module-lines': 1950, }, } diff --git a/system_tests/bigtable.py b/system_tests/bigtable.py index 6933bc60847c..8418b002ed64 100644 --- a/system_tests/bigtable.py +++ b/system_tests/bigtable.py @@ -71,6 +71,9 @@ def _operation_wait(operation, max_attempts=5): :type max_attempts: int :param max_attempts: (Optional) The maximum number of times to check if the operation has finished. Defaults to 5. + + :rtype: bool + :returns: Boolean indicating if the operation finished. """ total_sleep = 0 while not operation.finished(): diff --git a/tox.ini b/tox.ini index 71697f05f210..a7c525cb9d78 100644 --- a/tox.ini +++ b/tox.ini @@ -133,10 +133,12 @@ commands = python {toxinidir}/scripts/run_pylint.py deps = pep8 - pylint + pylint >= 1.6.4 unittest2 psutil Sphinx +setenv = + PYTHONPATH = passenv = {[testenv:system-tests]passenv} [testenv:system-tests]
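
A usage sketch (not part of the patch) of the new BigQuery query options added
above; the project, job name, and GCS URI are placeholders, and the calls
assume the ``gcloud.bigquery`` API as it stands in this tree::

    >>> from gcloud import bigquery
    >>> from gcloud.bigquery.job import QueryJob
    >>> from gcloud.bigquery.query import QueryResults
    >>>
    >>> client = bigquery.Client(project='my-project')  # placeholder project
    >>>
    >>> # Async job: use_legacy_sql / udf_resources map to
    >>> # configuration.query.useLegacySql / userDefinedFunctionResources.
    >>> job = QueryJob('my-job', 'SELECT 1', client)
    >>> job.use_legacy_sql = True
    >>> job.udf_resources = [{'resourceUri': 'gs://my-bucket/functions.js'}]
    >>> job.begin()
    >>>
    >>> # Sync query: the same flag on QueryResults is sent as useLegacySql.
    >>> query = QueryResults('SELECT 1', client)
    >>> query.use_legacy_sql = True
    >>> query.run()

Likewise, the HappyBase emulation's ``Table.counter_set()`` now stores the
value as a packed big-endian 64-bit integer via ``Batch.put`` instead of
raising ``NotImplementedError``, so (given an open emulation ``Connection``)::

    >>> table = connection.table('table-name')
    >>> table.counter_set('row-key', 'fam:col1', value=42)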