@@ -649,11 +649,11 @@ def test__save_row_no_cell(self):
         self.assertTrue(prd._rows[ROW_KEY] is row)

     def test_invalid_last_scanned_row_key_on_start(self):
-        from gcloud.bigtable.row_data import ReadRowsResponseError
+        from gcloud.bigtable.row_data import InvalidReadRowsResponse
         response = _ReadRowsResponseV2(chunks=(), last_scanned_row_key='ABC')
         iterator = _MockCancellableIterator(response)
         prd = self._makeOne(iterator)
-        with self.assertRaises(ReadRowsResponseError):
+        with self.assertRaises(InvalidReadRowsResponse):
             prd.consume_next()

     def test_valid_last_scanned_row_key_on_start(self):
@@ -666,16 +666,16 @@ def test_valid_last_scanned_row_key_on_start(self):
         self.assertEqual(prd._last_scanned_row_key, 'AFTER')

     def test_invalid_empty_chunk(self):
-        from gcloud.bigtable.row_data import ReadRowsResponseError
+        from gcloud.bigtable.row_data import InvalidChunk
         chunks = _generate_cell_chunks([''])
         response = _ReadRowsResponseV2(chunks)
         iterator = _MockCancellableIterator(response)
         prd = self._makeOne(iterator)
-        with self.assertRaises(ReadRowsResponseError):
+        with self.assertRaises(InvalidChunk):
             prd.consume_next()

     def test_invalid_empty_second_chunk(self):
-        from gcloud.bigtable.row_data import ReadRowsResponseError
+        from gcloud.bigtable.row_data import InvalidChunk
         chunks = _generate_cell_chunks(['', ''])
         first = chunks[0]
         first.row_key = b'RK'
@@ -684,32 +684,23 @@ def test_invalid_empty_second_chunk(self):
         response = _ReadRowsResponseV2(chunks)
         iterator = _MockCancellableIterator(response)
         prd = self._makeOne(iterator)
-        with self.assertRaises(ReadRowsResponseError):
+        with self.assertRaises(InvalidChunk):
             prd.consume_next()

-    # JSON Error cases
+    # JSON Error cases: invalid chunks

     def _fail_during_consume(self, testcase_name):
-        from gcloud.bigtable.row_data import ReadRowsResponseError
-        chunks, _ = self._load_json_test(testcase_name)
+        from gcloud.bigtable.row_data import InvalidChunk
+        chunks, results = self._load_json_test(testcase_name)
         response = _ReadRowsResponseV2(chunks)
         iterator = _MockCancellableIterator(response)
         prd = self._makeOne(iterator)
-        with self.assertRaises(ReadRowsResponseError):
+        with self.assertRaises(InvalidChunk):
             prd.consume_next()
-
-    def _fail_during_rows(self, testcase_name):
-        from gcloud.bigtable.row_data import ReadRowsResponseError
-        chunks, _ = self._load_json_test(testcase_name)
-        response = _ReadRowsResponseV2(chunks)
-        iterator = _MockCancellableIterator(response)
-        prd = self._makeOne(iterator)
-        prd.consume_next()
-        with self.assertRaises(ReadRowsResponseError):
-            _ = prd.rows
-
-    def test_invalid_no_commit(self):
-        self._fail_during_rows('invalid - no commit')
+        expected_result = self._sort_flattend_cells(
+            [result for result in results if not result['error']])
+        flattened = self._sort_flattend_cells(_flatten_cells(prd))
+        self.assertEqual(flattened, expected_result)

     def test_invalid_no_cell_key_before_commit(self):
         self._fail_during_consume('invalid - no cell key before commit')
@@ -727,9 +718,6 @@ def test_invalid_no_commit_between_rows(self):
     def test_invalid_no_commit_after_first_row(self):
         self._fail_during_consume('invalid - no commit after first row')

-    def test_invalid_last_row_missing_commit(self):
-        self._fail_during_rows('invalid - last row missing commit')
-
     def test_invalid_duplicate_row_key(self):
         self._fail_during_consume('invalid - duplicate row key')
@@ -751,21 +739,44 @@ def test_invalid_reset_with_chunk(self):
     def test_invalid_commit_with_chunk(self):
         self._fail_during_consume('invalid - commit with chunk')

+    # JSON Error cases: incomplete final row
+
+    def _sort_flattend_cells(self, flattened):
+        import operator
+        key_func = operator.itemgetter('rk', 'fm', 'qual')
+        return sorted(flattened, key=key_func)
+
+    def _incomplete_final_row(self, testcase_name):
+        chunks, results = self._load_json_test(testcase_name)
+        response = _ReadRowsResponseV2(chunks)
+        iterator = _MockCancellableIterator(response)
+        prd = self._makeOne(iterator)
+        prd.consume_next()
+        self.assertEqual(prd.state, prd.ROW_IN_PROGRESS)
+        expected_result = self._sort_flattend_cells(
+            [result for result in results if not result['error']])
+        flattened = self._sort_flattend_cells(_flatten_cells(prd))
+        self.assertEqual(flattened, expected_result)
+
+    def test_invalid_no_commit(self):
+        self._incomplete_final_row('invalid - no commit')
+
+    def test_invalid_last_row_missing_commit(self):
+        self._incomplete_final_row('invalid - last row missing commit')
+
     # Non-error cases

     _marker = object()

     def _match_results(self, testcase_name, expected_result=_marker):
-        import operator
-        key_func = operator.itemgetter('rk', 'fm', 'qual')
         chunks, results = self._load_json_test(testcase_name)
         response = _ReadRowsResponseV2(chunks)
         iterator = _MockCancellableIterator(response)
         prd = self._makeOne(iterator)
         prd.consume_next()
-        flattened = sorted(_flatten_cells(prd), key=key_func)
+        flattened = self._sort_flattend_cells(_flatten_cells(prd))
         if expected_result is self._marker:
-            expected_result = sorted(results, key=key_func)
+            expected_result = self._sort_flattend_cells(results)
         self.assertEqual(flattened, expected_result)

     def test_bare_commit_implies_ts_zero(self):
0 commit comments