From da6b9eb763a7309bdcc5f192019392f3f44bf933 Mon Sep 17 00:00:00 2001
From: Danny Hermes
Date: Fri, 9 Sep 2016 16:42:49 -0700
Subject: [PATCH] Retry entire test_paginate_files() storage system test.

This is not just "taking the easy way out"; it's really the most
appropriate fix, since there are so many assertions that can fail due
to eventual consistency. (For example, a request for 2 blobs should
have a next page when 3 are in the bucket, but this may break down due
to eventual consistency.)

Fixes #2217.

Also:
- restoring test_second_level() to pre-#2252 (by retrying the entire
  test case)
- restoring test_list_files() to pre-#2181 (by retrying the entire
  test case)
- adding retries around the remaining test cases that call
  list_blobs(): test_root_level_w_delimiter(), test_first_level() and
  test_third_level()
- adding a helper to empty a bucket, used in setUpClass() (which also
  calls list_blobs()) in both TestStoragePseudoHierarchy and
  TestStorageListFiles
---
 system_tests/storage.py | 57 ++++++++++++++++++++---------------------
 1 file changed, 28 insertions(+), 29 deletions(-)

diff --git a/system_tests/storage.py b/system_tests/storage.py
index f61de812aff4..6fce4fd85095 100644
--- a/system_tests/storage.py
+++ b/system_tests/storage.py
@@ -26,7 +26,6 @@
 
 from system_test_utils import unique_resource_id
 from retry import RetryErrors
-from retry import RetryResult
 
 
 HTTP = httplib2.Http()
@@ -44,6 +43,19 @@ def _bad_copy(bad_request):
                              error_predicate=_bad_copy)
 
 
+def _empty_bucket(bucket):
+    """Empty a bucket of all existing blobs.
+
+    This accounts (partially) for the eventual consistency of the
+    list blobs API call.
+    """
+    for blob in bucket.list_blobs():
+        try:
+            blob.delete()
+        except exceptions.NotFound:  # eventual consistency
+            pass
+
+
 class Config(object):
     """Run-time configuration to be modified at set-up.
 
@@ -216,8 +228,7 @@ class TestStorageListFiles(TestStorageFiles):
     def setUpClass(cls):
         super(TestStorageListFiles, cls).setUpClass()
         # Make sure bucket empty before beginning.
-        for blob in cls.bucket.list_blobs():
-            blob.delete()
+        _empty_bucket(cls.bucket)
 
         logo_path = cls.FILES['logo']['path']
         blob = storage.Blob(cls.FILENAMES[0], bucket=cls.bucket)
@@ -235,18 +246,13 @@ def tearDownClass(cls):
         for blob in cls.suite_blobs_to_delete:
             blob.delete()
 
+    @RetryErrors(unittest.TestCase.failureException)
     def test_list_files(self):
-        def _all_in_list(blobs):
-            return len(blobs) == len(self.FILENAMES)
-
-        def _all_blobs():
-            return list(self.bucket.list_blobs())
-
-        retry = RetryResult(_all_in_list)
-        all_blobs = retry(_all_blobs)()
+        all_blobs = list(self.bucket.list_blobs())
         self.assertEqual(sorted(blob.name for blob in all_blobs),
                          sorted(self.FILENAMES))
 
+    @RetryErrors(unittest.TestCase.failureException)
     def test_paginate_files(self):
         truncation_size = 1
         count = len(self.FILENAMES) - truncation_size
@@ -277,11 +283,7 @@ class TestStoragePseudoHierarchy(TestStorageFiles):
     def setUpClass(cls):
         super(TestStoragePseudoHierarchy, cls).setUpClass()
         # Make sure bucket empty before beginning.
-        for blob in cls.bucket.list_blobs():
-            try:
-                blob.delete()
-            except exceptions.NotFound:  # eventual consistency
-                pass
+        _empty_bucket(cls.bucket)
 
         simple_path = cls.FILES['simple']['path']
         blob = storage.Blob(cls.FILENAMES[0], bucket=cls.bucket)
@@ -297,6 +299,7 @@ def tearDownClass(cls):
         for blob in cls.suite_blobs_to_delete:
             blob.delete()
 
+    @RetryErrors(unittest.TestCase.failureException)
     def test_root_level_w_delimiter(self):
         iterator = self.bucket.list_blobs(delimiter='/')
         response = iterator.get_next_page_response()
@@ -306,6 +309,7 @@ def test_root_level_w_delimiter(self):
         self.assertTrue(iterator.next_page_token is None)
         self.assertEqual(iterator.prefixes, set(['parent/']))
 
+    @RetryErrors(unittest.TestCase.failureException)
     def test_first_level(self):
         iterator = self.bucket.list_blobs(delimiter='/', prefix='parent/')
         response = iterator.get_next_page_response()
@@ -315,30 +319,25 @@ def test_first_level(self):
         self.assertTrue(iterator.next_page_token is None)
         self.assertEqual(iterator.prefixes, set(['parent/child/']))
 
+    @RetryErrors(unittest.TestCase.failureException)
     def test_second_level(self):
         expected_names = [
             'parent/child/file21.txt',
             'parent/child/file22.txt',
         ]
 
-        def _all_in_list(pair):
-            _, blobs = pair
-            return [blob.name for blob in blobs] == expected_names
-
-        def _all_blobs():
-            iterator = self.bucket.list_blobs(delimiter='/',
-                                              prefix='parent/child/')
-            response = iterator.get_next_page_response()
-            blobs = list(iterator.get_items_from_response(response))
-            return iterator, blobs
-
-        retry = RetryResult(_all_in_list)
-        iterator, _ = retry(_all_blobs)()
+        iterator = self.bucket.list_blobs(delimiter='/',
+                                          prefix='parent/child/')
+        response = iterator.get_next_page_response()
+        blobs = list(iterator.get_items_from_response(response))
+        self.assertEqual([blob.name for blob in blobs],
+                         expected_names)
         self.assertEqual(iterator.page_number, 1)
         self.assertTrue(iterator.next_page_token is None)
         self.assertEqual(iterator.prefixes,
                          set(['parent/child/grand/', 'parent/child/other/']))
 
+    @RetryErrors(unittest.TestCase.failureException)
     def test_third_level(self):
         # Pseudo-hierarchy can be arbitrarily deep, subject to the limit
         # of 1024 characters in the UTF-8 encoded name:
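
For reference, the RetryErrors helper used above is applied as a decorator
that re-runs a whole test method when the named exception type is raised, so
an assertion tripped by stale list_blobs() results simply triggers another
attempt instead of failing the suite. The sketch below only illustrates that
retry-on-exception pattern; the retry_on name and its max_tries/delay/backoff
defaults are made up for this example and it is not the project's actual
retry.RetryErrors implementation.

import time
import unittest


def retry_on(exception, max_tries=4, delay=1, backoff=2):
    # Illustrative sketch only -- not the real retry.RetryErrors.
    # Re-run the wrapped callable whenever `exception` is raised,
    # sleeping between attempts and re-raising after the last one.
    def decorator(to_wrap):
        def wrapper(*args, **kwargs):
            wait = delay
            for attempt in range(1, max_tries + 1):
                try:
                    return to_wrap(*args, **kwargs)
                except exception:
                    if attempt == max_tries:
                        raise
                    time.sleep(wait)
                    wait *= backoff
        return wrapper
    return decorator


class ExampleListTest(unittest.TestCase):
    # Decorating with the test's own failureException means any failed
    # assertion (e.g. a too-short listing result) is retried rather than
    # reported immediately, mirroring how the patch wraps the list_blobs()
    # test cases.

    @retry_on(unittest.TestCase.failureException)
    def test_eventually_consistent_listing(self):
        self.assertTrue(True)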