From 6c32cefa1e59becabb7dec621d60cda3c40f66a2 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Mon, 29 Feb 2016 13:25:28 -0800 Subject: [PATCH] Moving Bigtable Row.commit_modifications() into commit(). Fixes #1548. --- docs/bigtable-data-api.rst | 14 -- gcloud/bigtable/happybase/table.py | 6 +- gcloud/bigtable/happybase/test_table.py | 15 +- gcloud/bigtable/row.py | 237 +++++++++++++++++++----- gcloud/bigtable/table.py | 8 +- gcloud/bigtable/test_row.py | 86 ++++++++- gcloud/bigtable/test_table.py | 15 ++ scripts/run_pylint.py | 2 +- 8 files changed, 307 insertions(+), 76 deletions(-) diff --git a/docs/bigtable-data-api.rst b/docs/bigtable-data-api.rst index 776fe4a8429f..1c2a4bc92509 100644 --- a/docs/bigtable-data-api.rst +++ b/docs/bigtable-data-api.rst @@ -70,13 +70,6 @@ in a batch via :meth:`commit() `: row.commit() -To send **append** mutations in batch, use -:meth:`commit_modifications() `: - -.. code:: python - - row.commit_modifications() - We have a small set of methods on the :class:`Row ` to build these mutations up. @@ -214,13 +207,6 @@ If accumulated mutations need to be dropped, use row.clear_mutations() -To clear **append** mutations, use -:meth:`clear_modification_rules() ` - -.. code:: python - - row.clear_modification_rules() - Reading Data ++++++++++++ diff --git a/gcloud/bigtable/happybase/table.py b/gcloud/bigtable/happybase/table.py index 0f6314ccdfd7..5b21c00137c0 100644 --- a/gcloud/bigtable/happybase/table.py +++ b/gcloud/bigtable/happybase/table.py @@ -603,12 +603,12 @@ def counter_inc(self, row, column, value=1): :rtype: int :returns: Counter value after incrementing. """ - row = self._low_level_table.row(row) + row = self._low_level_table.row(row, append=True) if isinstance(column, six.binary_type): column = column.decode('utf-8') column_family_id, column_qualifier = column.split(':') row.increment_cell_value(column_family_id, column_qualifier, value) - # See row.commit_modifications() will return a dictionary: + # See row._commit_modifications() will return a dictionary: # { # u'col-fam-id': { # b'col-name1': [ @@ -618,7 +618,7 @@ def counter_inc(self, row, column, value=1): # ... # }, # } - modified_cells = row.commit_modifications() + modified_cells = row.commit() # Get the cells in the modified column, column_cells = modified_cells[column_family_id][column_qualifier] # Make sure there is exactly one cell in the column. diff --git a/gcloud/bigtable/happybase/test_table.py b/gcloud/bigtable/happybase/test_table.py index b70abf77f2dc..16f3aa2f4d6b 100644 --- a/gcloud/bigtable/happybase/test_table.py +++ b/gcloud/bigtable/happybase/test_table.py @@ -871,10 +871,12 @@ def _counter_inc_helper(self, row, column, value, commit_result): table = self._makeOne(name, connection) # Mock the return values. 
table._low_level_table = _MockLowLevelTable() - table._low_level_table.row_values[row] = _MockLowLevelRow( + table._low_level_table.row_values[row] = row_obj = _MockLowLevelRow( row, commit_result=commit_result) + self.assertFalse(row_obj._append) result = table.counter_inc(row, column, value=value) + self.assertTrue(row_obj._append) incremented_value = value + _MockLowLevelRow.COUNTER_DEFAULT self.assertEqual(result, incremented_value) @@ -1431,8 +1433,10 @@ def list_column_families(self): self.list_column_families_calls += 1 return self.column_families - def row(self, row_key): - return self.row_values[row_key] + def row(self, row_key, append=None): + result = self.row_values[row_key] + result._append = append + return result def read_row(self, *args, **kwargs): self.read_row_calls.append((args, kwargs)) @@ -1447,8 +1451,9 @@ class _MockLowLevelRow(object): COUNTER_DEFAULT = 0 - def __init__(self, row_key, commit_result=None): + def __init__(self, row_key, append=None, commit_result=None): self.row_key = row_key + self._append = append self.counts = {} self.commit_result = commit_result @@ -1457,7 +1462,7 @@ def increment_cell_value(self, column_family_id, column, int_value): self.COUNTER_DEFAULT) self.counts[(column_family_id, column)] = count + int_value - def commit_modifications(self): + def commit(self): return self.commit_result diff --git a/gcloud/bigtable/row.py b/gcloud/bigtable/row.py index 028c697d0c3d..c4f59d4ffd9b 100644 --- a/gcloud/bigtable/row.py +++ b/gcloud/bigtable/row.py @@ -36,15 +36,48 @@ class Row(object): """Representation of a Google Cloud Bigtable Row. - .. note:: + A :class:`Row` accumulates mutations locally via these methods: + + * :meth:`set_cell`, + * :meth:`delete` + * :meth:`delete_cell` + * :meth:`delete_cells` + * :meth:`append_cell_value` + * :meth:`increment_cell_value` + + To actually send these mutations to the Google Cloud Bigtable API, you + must call :meth:`commit`. + + These methods split into two groups: those that are meant to augment + existing cell values and those that are meant to set / delete cell + values. In order to differentiate the two, the ``append`` argument + identifies what type of row mutations are intended. + + If ``append`` is :data:`False` (the default), then + :meth:`append_cell_value` and :meth:`increment_cell_value` cannot be used + to accumulate mutations. If ``append`` is :data:`True`, then **only** + :meth:`append_cell_value` and :meth:`increment_cell_value` can be used. + + The remaining methods can either be used directly:: + + >>> row = table.row(b'row-key1') + >>> row.set_cell(u'fam', b'col1', b'cell-val') + >>> row.delete_cell(u'fam', b'col2') + + or with a filter. If a ``filter_`` is set on the :class:`Row`, the + mutations must have an associated boolean ``state``:: + + >>> row_cond = table.row(b'row-key2', filter_=row_filter) + >>> row_cond.set_cell(u'fam', b'col', b'cell-val', state=True) + >>> row_cond.delete_cell(u'fam', b'col', state=False) + + In the filtered case, the mutations will be applied conditionally, based on + whether the filter matches any cells in the :class:`Row` or not. - A :class:`Row` accumulates mutations locally via the :meth:`set_cell`, - :meth:`delete`, :meth:`delete_cell` and :meth:`delete_cells` methods. - To actually send these mutations to the Google Cloud Bigtable API, you - must call :meth:`commit`. If a ``filter_`` is set on the :class:`Row`, - the mutations must have an associated state: :data:`True` or - :data:`False`. 
The mutations will be applied conditionally, based on - whether the filter matches any cells in the :class:`Row` or not. + .. warning:: + + At most one of ``filter_`` and ``append`` can be used in a + :class:`Row`. :type row_key: bytes :param row_key: The key for the current row. @@ -59,24 +92,41 @@ class Row(object): When :meth:`commit`-ed, the mutations for the :data:`True` state will be applied if the filter matches any cells in the row, otherwise the :data:`False` state will be. + + :type append: bool + :param append: (Optional) Flag to determine if the row should be used + for append mutations. If :data:`True`, then only + :meth:`append_cell_value` and :meth:`increment_cell_value` + can be used for accumulating mutations. + + :raises: :class:`ValueError ` if both + ``filter_`` and ``append`` are used. """ ALL_COLUMNS = object() """Sentinel value used to indicate all columns in a column family.""" - def __init__(self, row_key, table, filter_=None): + def __init__(self, row_key, table, filter_=None, append=False): + if append and filter_ is not None: + raise ValueError('At most one of filter_ and append can be set') self._row_key = _to_bytes(row_key) self._table = table self._filter = filter_ - self._rule_pb_list = [] - if self._filter is None: - self._pb_mutations = [] - self._true_pb_mutations = None - self._false_pb_mutations = None - else: - self._pb_mutations = None + self._append = append + self._rule_pb_list = None + self._pb_mutations = None + self._true_pb_mutations = None + self._false_pb_mutations = None + + # We've already checked that at most one of filter_!=None, + # append=True has occurred. + if self._filter is not None: self._true_pb_mutations = [] self._false_pb_mutations = [] + elif self._append: + self._rule_pb_list = [] + else: + self._pb_mutations = [] def _get_mutations(self, state=None): """Gets the list of mutations for a given state. @@ -108,6 +158,26 @@ def _get_mutations(self, state=None): else: return self._false_pb_mutations + @property + def accumulation_type(self): + """Identify what type of mutations this row can accumulate. + + .. note:: + + This method assumes the :class:`Row` is not in a pathological + state that is not possible to occur via public methods (e.g. + ``filter_`` and ``append`` set simultaneously). + + :rtype: str + :returns: One of ``direct``, ``conditional`` or ``append`` + """ + if self._filter is not None: + return 'conditional' + elif self._append: + return 'append' + else: + return 'direct' + def set_cell(self, column_family_id, column, value, timestamp=None, state=None): """Sets a value in this row. @@ -144,7 +214,12 @@ def set_cell(self, column_family_id, column, value, timestamp=None, :param state: (Optional) The state that the mutation should be applied in. Unset if the mutation is not conditional, otherwise :data:`True` or :data:`False`. + + :raises: :class:`ValueError ` if the + :class:`Row` is an append row """ + if self._append: + raise ValueError('set_cell() cannot be used on an append Row.') column = _to_bytes(column) if isinstance(value, six.integer_types): value = _PACK_I64(value) @@ -174,7 +249,7 @@ def append_cell_value(self, column_family_id, column, value): This method adds a read-modify rule protobuf to the accumulated read-modify rules on this :class:`Row`, but does not make an API request. To actually send an API request (with the rules) to the - Google Cloud Bigtable API, call :meth:`commit_modifications`. + Google Cloud Bigtable API, call :meth:`commit`. 
:type column_family_id: str :param column_family_id: The column family that contains the column. @@ -189,7 +264,13 @@ def append_cell_value(self, column_family_id, column, value): :param value: The value to append to the existing value in the cell. If the targeted cell is unset, it will be treated as containing the empty string. + + :raises: :class:`ValueError ` if the + :class:`Row` is not an append row """ + if not self._append: + raise ValueError('append_cell_value() can only be used on an ' + 'append Row.') column = _to_bytes(column) value = _to_bytes(value) rule_pb = data_pb2.ReadModifyWriteRule(family_name=column_family_id, @@ -208,7 +289,7 @@ def increment_cell_value(self, column_family_id, column, int_value): This method adds a read-modify rule protobuf to the accumulated read-modify rules on this :class:`Row`, but does not make an API request. To actually send an API request (with the rules) to the - Google Cloud Bigtable API, call :meth:`commit_modifications`. + Google Cloud Bigtable API, call :meth:`commit`. :type column_family_id: str :param column_family_id: The column family that contains the column. @@ -226,7 +307,13 @@ def increment_cell_value(self, column_family_id, column, int_value): must contain an 8-byte value (interpreted as a 64-bit big-endian signed integer), or the entire request will fail. + + :raises: :class:`ValueError ` if the + :class:`Row` is not an append row """ + if not self._append: + raise ValueError('increment_cell_value() can only be used on an ' + 'append Row.') column = _to_bytes(column) rule_pb = data_pb2.ReadModifyWriteRule(family_name=column_family_id, column_qualifier=column, @@ -247,7 +334,12 @@ def delete(self, state=None): :param state: (Optional) The state that the mutation should be applied in. Unset if the mutation is not conditional, otherwise :data:`True` or :data:`False`. + + :raises: :class:`ValueError ` if the + :class:`Row` is an append row """ + if self._append: + raise ValueError('delete() cannot be used on an append Row.') mutation_val = data_pb2.Mutation.DeleteFromRow() mutation_pb = data_pb2.Mutation(delete_from_row=mutation_val) self._get_mutations(state).append(mutation_pb) @@ -280,7 +372,12 @@ def delete_cell(self, column_family_id, column, time_range=None, :param state: (Optional) The state that the mutation should be applied in. Unset if the mutation is not conditional, otherwise :data:`True` or :data:`False`. + + :raises: :class:`ValueError ` if the + :class:`Row` is an append row """ + if self._append: + raise ValueError('delete_cell() cannot be used on an append Row.') self.delete_cells(column_family_id, [column], time_range=time_range, state=state) @@ -314,7 +411,12 @@ def delete_cells(self, column_family_id, columns, time_range=None, :param state: (Optional) The state that the mutation should be applied in. Unset if the mutation is not conditional, otherwise :data:`True` or :data:`False`. + + :raises: :class:`ValueError ` if the + :class:`Row` is an append row """ + if self._append: + raise ValueError('delete_cells() cannot be used on an append Row.') mutations_list = self._get_mutations(state) if columns is self.ALL_COLUMNS: mutation_val = data_pb2.Mutation.DeleteFromFamily( @@ -410,15 +512,30 @@ def _commit_check_and_mutate(self): return resp.predicate_matched def clear_mutations(self): - """Removes all currently accumulated mutations on the current row.""" - if self._filter is None: - del self._pb_mutations[:] - else: + """Removes all currently accumulated mutations on the current row. + + .. 
note:: + + This method assumes the :class:`Row` is not in a pathological + state that is not possible to occur via public methods (e.g. + ``filter_`` and ``append`` set simultaneously). + """ + if self._filter is not None: del self._true_pb_mutations[:] del self._false_pb_mutations[:] + elif self._append: + del self._rule_pb_list[:] + else: + del self._pb_mutations[:] def commit(self): - """Makes a ``MutateRow`` or ``CheckAndMutateRow`` API request. + """Makes an API request. + + Sends request to one of three RPC methods: + + * ``MutateRow`` in the **direct** case + * ``CheckAndMutateRow`` in the **conditional** case + * ``ReadModifyWriteRow`` in the **append** case If no mutations have been created in the row, no request is made. @@ -434,28 +551,62 @@ def commit(self): any cells in the :class:`Row` or not. (Each method which adds a mutation has a ``state`` parameter for this purpose.) - :rtype: :class:`bool` or :data:`NoneType ` - :returns: :data:`None` if there is no filter, otherwise a flag - indicating if the filter was matched (which also - indicates which set of mutations were applied by the server). + In the case that this :class:`Row` has accumulated append mutations, + the response will be a dictionary containing the updated + cells as nested dictionaries:: + + >>> row.commit() + { + u'col-fam-id': { + b'col-name1': [ + (b'cell-val', datetime.datetime(...)), + (b'cell-val-newer', datetime.datetime(...)), + ], + b'col-name2': [ + (b'altcol-cell-val', datetime.datetime(...)), + ], + }, + u'col-fam-id2': { + b'col-name3-but-other-fam': [ + (b'foo', datetime.datetime(...)), + ], + }, + } + + This dictionary has column families as keys and dictionaries of columns + within the family as values. Each column contains a list of cells + modified. Each cell is represented with a two-tuple with the + value (in bytes) and the timestamp for the cell. + + .. note:: + + This method assumes the :class:`Row` is not in a pathological + state that is not possible to occur via public methods (e.g. + ``filter_`` and ``append`` set simultaneously). + + :rtype: :class:`bool`, :data:`NoneType ` or + :class:`dict` + :returns: In the filter case, returns a flag indicating if the filter + was matched (which also indicates which set of mutations + were applied by the server). In the append case, returns + a dictionary with the new contents of all modified cells. + Otherwise, returns :data:`None`. :raises: :class:`ValueError ` if the number of mutations exceeds the :data:`MAX_MUTATIONS`. """ - if self._filter is None: - result = self._commit_mutate() - else: + if self._filter is not None: result = self._commit_check_and_mutate() + elif self._append: + result = self._commit_modifications() + else: + result = self._commit_mutate() # Reset mutations after commit-ing request. self.clear_mutations() return result - def clear_modification_rules(self): - """Removes all currently accumulated modifications on current row.""" - del self._rule_pb_list[:] - - def commit_modifications(self): + def _commit_modifications(self): """Makes a ``ReadModifyWriteRow`` API request. This commits modifications made by :meth:`append_cell_value` and @@ -470,7 +621,7 @@ def commit_modifications(self): .. code:: python - >>> row.commit_modifications() + >>> row._commit_modifications() { u'col-fam-id': { b'col-name1': [ @@ -494,23 +645,25 @@ def commit_modifications(self): dictionary of columns. Each column contains a list of cells modified. Each cell is represented with a two-tuple with the value (in bytes) and the timestamp for the cell. 
- + :raises: :class:`ValueError ` if the number of + mutations exceeds the :data:`MAX_MUTATIONS`. """ - if len(self._rule_pb_list) == 0: + num_mutations = len(self._rule_pb_list) + if num_mutations == 0: return {} + if num_mutations > MAX_MUTATIONS: + raise ValueError('%d total append mutations exceed the maximum ' + 'allowable %d.' % (num_mutations, MAX_MUTATIONS)) request_pb = messages_pb2.ReadModifyWriteRowRequest( table_name=self._table.name, row_key=self._row_key, rules=self._rule_pb_list, ) - # We expect a `.data_pb2.Row` client = self._table._cluster._client + # We expect a `.data_pb2.Row` row_response = client._data_stub.ReadModifyWriteRow( request_pb, client.timeout_seconds) - # Reset modifications after commit-ing request. - self.clear_modification_rules() - # NOTE: We expect row_response.key == self._row_key but don't check. return _parse_rmw_row_response(row_response) diff --git a/gcloud/bigtable/table.py b/gcloud/bigtable/table.py index 81e967218a00..fd72b1f60b7d 100644 --- a/gcloud/bigtable/table.py +++ b/gcloud/bigtable/table.py @@ -95,7 +95,7 @@ def column_family(self, column_family_id, gc_rule=None): """ return ColumnFamily(column_family_id, self, gc_rule=gc_rule) - def row(self, row_key, filter_=None): + def row(self, row_key, filter_=None, append=False): """Factory to create a row associated with this table. :type row_key: bytes @@ -105,10 +105,14 @@ def row(self, row_key, filter_=None): :param filter_: (Optional) Filter to be used for conditional mutations. See :class:`.Row` for more details. + :type append: bool + :param append: (Optional) Flag to determine if the row should be used + for append mutations. + :rtype: :class:`.Row` :returns: A row owned by this table. """ - return Row(row_key, self, filter_=filter_) + return Row(row_key, self, filter_=filter_, append=append) def __eq__(self, other): if not isinstance(other, self.__class__): diff --git a/gcloud/bigtable/test_row.py b/gcloud/bigtable/test_row.py index b2385be2901c..b960d8a6c22f 100644 --- a/gcloud/bigtable/test_row.py +++ b/gcloud/bigtable/test_row.py @@ -34,6 +34,17 @@ def test_constructor(self): self.assertEqual(row._row_key, row_key) self.assertTrue(row._table is table) self.assertTrue(row._filter is filter_) + self.assertFalse(row._append) + + def test_constructor_append(self): + row_key = b'row_key' + table = object() + + row = self._makeOne(row_key, table, append=True) + self.assertEqual(row._row_key, row_key) + self.assertTrue(row._table is table) + self.assertEqual(row._filter, None) + self.assertTrue(row._append) def test_constructor_with_unicode(self): row_key = u'row_key' @@ -49,6 +60,20 @@ def test_constructor_with_non_bytes(self): with self.assertRaises(TypeError): self._makeOne(row_key, None) + def test_constructor_filter_and_append(self): + row_key = object() + with self.assertRaises(ValueError): + self._makeOne(row_key, None, filter_=object(), append=True) + + def test_accumulation_type(self): + row_key = b'row-key' + row1 = self._makeOne(row_key, None) + row2 = self._makeOne(row_key, None, filter_=object()) + row3 = self._makeOne(row_key, None, append=True) + self.assertEqual(row1.accumulation_type, 'direct') + self.assertEqual(row2.accumulation_type, 'conditional') + self.assertEqual(row3.accumulation_type, 'append') + def _get_mutations_helper(self, filter_=None, state=None): row_key = b'row_key' row = self._makeOne(row_key, None, filter_=filter_) @@ -152,12 +177,17 @@ def test_set_cell_with_non_null_timestamp(self): self._set_cell_helper(timestamp=timestamp, 
timestamp_micros=millis_granularity) + def test_set_cell_on_append(self): + row = self._makeOne(b'row-key', None, append=True) + with self.assertRaises(ValueError): + row.set_cell(u'fam', b'col', b'val') + def test_append_cell_value(self): from gcloud.bigtable._generated import bigtable_data_pb2 as data_pb2 table = object() row_key = b'row_key' - row = self._makeOne(row_key, table) + row = self._makeOne(row_key, table, append=True) self.assertEqual(row._rule_pb_list, []) column = b'column' @@ -169,12 +199,17 @@ def test_append_cell_value(self): append_value=value) self.assertEqual(row._rule_pb_list, [expected_pb]) + def test_append_cell_value_without_append(self): + row = self._makeOne(b'row-key', None, append=False) + with self.assertRaises(ValueError): + row.append_cell_value(u'fam', b'col', b'val') + def test_increment_cell_value(self): from gcloud.bigtable._generated import bigtable_data_pb2 as data_pb2 table = object() row_key = b'row_key' - row = self._makeOne(row_key, table) + row = self._makeOne(row_key, table, append=True) self.assertEqual(row._rule_pb_list, []) column = b'column' @@ -186,6 +221,11 @@ def test_increment_cell_value(self): increment_amount=int_value) self.assertEqual(row._rule_pb_list, [expected_pb]) + def test_increment_cell_value_without_append(self): + row = self._makeOne(b'row-key', None, append=False) + with self.assertRaises(ValueError): + row.increment_cell_value(u'fam', b'col', 123) + def test_delete(self): from gcloud.bigtable._generated import bigtable_data_pb2 as data_pb2 @@ -199,6 +239,11 @@ def test_delete(self): ) self.assertEqual(row._pb_mutations, [expected_pb]) + def test_delete_on_append(self): + row = self._makeOne(b'row-key', None, append=True) + with self.assertRaises(ValueError): + row.delete() + def test_delete_cell(self): klass = self._getTargetClass() @@ -235,6 +280,11 @@ def delete_cells(self, *args, **kwargs): 'time_range': time_range, }]) + def test_delete_cell_on_append(self): + row = self._makeOne(b'row-key', None, append=True) + with self.assertRaises(ValueError): + row.delete_cell(u'fam', b'col') + def test_delete_cells_non_iterable(self): row_key = b'row_key' column_family_id = u'column_family_id' @@ -357,6 +407,11 @@ def test_delete_cells_with_string_columns(self): ) self.assertEqual(row._pb_mutations, [expected_pb1, expected_pb2]) + def test_delete_cells_on_append(self): + row = self._makeOne(b'row-key', None, append=True) + with self.assertRaises(ValueError): + row.delete_cells(u'fam', [b'col1', b'col2']) + def test_commit(self): from google.protobuf import empty_pb2 from gcloud.bigtable._generated import bigtable_data_pb2 as data_pb2 @@ -545,7 +600,7 @@ def test_commit_with_filter_no_mutations(self): # Make sure no request was sent. self.assertEqual(stub.method_calls, []) - def test_commit_modifications(self): + def test_commit_append(self): from gcloud._testing import _Monkey from gcloud.bigtable._generated import bigtable_data_pb2 as data_pb2 from gcloud.bigtable._generated import ( @@ -560,7 +615,7 @@ def test_commit_modifications(self): timeout_seconds = 87 client = _Client(timeout_seconds=timeout_seconds) table = _Table(table_name, client=client) - row = self._makeOne(row_key, table) + row = self._makeOne(row_key, table, append=True) # Create request_pb value = b'bytes-value' @@ -594,7 +649,7 @@ def mock_parse_rmw_row_response(row_response): # Perform the method and check the result. 
with _Monkey(MUT, _parse_rmw_row_response=mock_parse_rmw_row_response): row.append_cell_value(column_family_id, column, value) - result = row.commit_modifications() + result = row.commit() self.assertEqual(result, expected_result) self.assertEqual(stub.method_calls, [( @@ -602,31 +657,44 @@ def mock_parse_rmw_row_response(row_response): (request_pb, timeout_seconds), {}, )]) - self.assertEqual(row._pb_mutations, []) + self.assertEqual(row._pb_mutations, None) self.assertEqual(row._true_pb_mutations, None) self.assertEqual(row._false_pb_mutations, None) self.assertEqual(row_responses, [response_pb]) self.assertEqual(row._rule_pb_list, []) - def test_commit_modifications_no_rules(self): + def test_commit_append_no_rules(self): from gcloud.bigtable._testing import _FakeStub row_key = b'row_key' client = _Client() table = _Table(None, client=client) - row = self._makeOne(row_key, table) + row = self._makeOne(row_key, table, append=True) self.assertEqual(row._rule_pb_list, []) # Patch the stub used by the API method. client._data_stub = stub = _FakeStub() # Perform the method and check the result. - result = row.commit_modifications() + result = row.commit() self.assertEqual(result, {}) # Make sure no request was sent. self.assertEqual(stub.method_calls, []) + def test_commit_append_too_many_mutations(self): + from gcloud._testing import _Monkey + from gcloud.bigtable import row as MUT + + row_key = b'row_key' + table = object() + row = self._makeOne(row_key, table, append=True) + row._rule_pb_list = [1, 2, 3] + num_mutations = len(row._rule_pb_list) + with _Monkey(MUT, MAX_MUTATIONS=num_mutations - 1): + with self.assertRaises(ValueError): + row.commit() + class Test_BoolFilter(unittest2.TestCase): diff --git a/gcloud/bigtable/test_table.py b/gcloud/bigtable/test_table.py index 882db22c4d51..b2367a271a28 100644 --- a/gcloud/bigtable/test_table.py +++ b/gcloud/bigtable/test_table.py @@ -69,6 +69,21 @@ def test_row_factory(self): self.assertEqual(row._row_key, row_key) self.assertEqual(row._table, table) self.assertEqual(row._filter, filter_) + self.assertFalse(row._append) + + def test_row_factory_append(self): + from gcloud.bigtable.row import Row + + table_id = 'table-id' + table = self._makeOne(table_id, None) + row_key = b'row_key' + row = table.row(row_key, append=True) + + self.assertTrue(isinstance(row, Row)) + self.assertEqual(row._row_key, row_key) + self.assertEqual(row._table, table) + self.assertEqual(row._filter, None) + self.assertTrue(row._append) def test___eq__(self): table_id = 'table_id' diff --git a/scripts/run_pylint.py b/scripts/run_pylint.py index bfd8cf020f01..c36de6369080 100644 --- a/scripts/run_pylint.py +++ b/scripts/run_pylint.py @@ -66,7 +66,7 @@ } TEST_RC_REPLACEMENTS = { 'FORMAT': { - 'max-module-lines': 1800, + 'max-module-lines': 1875, }, }
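For reference, a minimal usage sketch of the consolidated ``commit()`` flow this patch introduces (not part of the diff itself; ``table``, the column family and column names below are assumed placeholders for an existing ``gcloud.bigtable.table.Table`` and schema):

.. code:: python

    # An append row (new ``append=True`` flag) only accepts
    # read-modify-write mutations.
    row = table.row(b'row-key', append=True)
    row.increment_cell_value(u'fam', b'counter', 1)
    row.append_cell_value(u'fam', b'greeting', b'-suffix')

    # commit() now issues the ReadModifyWriteRow RPC (formerly
    # commit_modifications()) and returns the modified cells keyed by
    # column family, then column:
    #     {family: {column: [(value_bytes, timestamp), ...]}}
    modified_cells = row.commit()
    counter_cells = modified_cells[u'fam'][b'counter']

    # A direct row still accumulates set_cell()/delete*() mutations and
    # its commit() returns None; passing both filter_ and append raises
    # ValueError at construction time.
    plain_row = table.row(b'row-key')
    plain_row.set_cell(u'fam', b'col', b'value')
    plain_row.commit()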