diff --git a/system_tests/bigquery.py b/system_tests/bigquery.py
index 6a3d8b22e913..b5370efec96f 100644
--- a/system_tests/bigquery.py
+++ b/system_tests/bigquery.py
@@ -17,7 +17,6 @@
 import unittest
 
 from google.cloud import bigquery
-from google.cloud.exceptions import Conflict
 from google.cloud.exceptions import Forbidden
 
 from retry import RetryErrors
@@ -35,6 +34,7 @@ def _rate_limit_exceeded(forbidden):
     return any(error['reason'] == 'rateLimitExceeded'
                for error in forbidden._errors)
 
+
 # We need to wait to stay within the rate limits.
 # The alternative outcome is a 403 Forbidden response from upstream, which
 # they return instead of the more appropriate 429.
@@ -61,11 +61,22 @@ def setUp(self):
         self.to_delete = []
 
     def tearDown(self):
+        from google.cloud.bigquery.dataset import Dataset
         from google.cloud.storage import Bucket
+        from google.cloud.exceptions import BadRequest
+        from google.cloud.exceptions import Conflict
+
+        def _still_in_use(bad_request):
+            return any(error['reason'] == 'resourceInUse'
+                       for error in bad_request._errors)
+
+        retry_in_use = RetryErrors(BadRequest, error_predicate=_still_in_use)
+        retry_409 = RetryErrors(Conflict)
         for doomed in self.to_delete:
             if isinstance(doomed, Bucket):
-                retry = RetryErrors(Conflict)
-                retry(doomed.delete)(force=True)
+                retry_409(doomed.delete)(force=True)
+            elif isinstance(doomed, Dataset):
+                retry_in_use(doomed.delete)()
             else:
                 doomed.delete()
 
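For context, the `tearDown` change relies on `RetryErrors` only retrying the wrapped call while the supplied predicate accepts the raised error: deleting a `Dataset` can transiently fail with a 400 `BadRequest` whose reason is `resourceInUse` (presumably while its contents are still being removed), so `retry_in_use` keeps retrying `doomed.delete` until that condition clears, while `retry_409` keeps the existing behavior of retrying bucket deletion on `Conflict`. Below is a minimal sketch of that retry pattern, assuming `RetryErrors` behaves roughly like this wrapper; the class name `SimpleRetryErrors` and the `max_tries`/`delay`/`backoff` parameters are illustrative assumptions, not necessarily the real `RetryErrors` signature.

```python
import time


class SimpleRetryErrors(object):
    """Illustrative stand-in for the ``RetryErrors`` helper used above.

    Retries the wrapped callable while it raises ``exception`` and the
    optional ``error_predicate`` says the error is worth retrying.
    """

    def __init__(self, exception, error_predicate=None,
                 max_tries=4, delay=1, backoff=2):
        self.exception = exception
        self.error_predicate = error_predicate or (lambda exc: True)
        self.max_tries = max_tries
        self.delay = delay
        self.backoff = backoff

    def __call__(self, to_wrap):
        def wrapped(*args, **kwargs):
            tries = 0
            delay = self.delay
            while True:
                try:
                    return to_wrap(*args, **kwargs)
                except self.exception as exc:
                    tries += 1
                    # Re-raise if the predicate rejects the error or we
                    # have exhausted our attempts; otherwise back off.
                    if tries >= self.max_tries or not self.error_predicate(exc):
                        raise
                    time.sleep(delay)
                    delay *= self.backoff
        return wrapped
```

Usage mirrors the tearDown logic, e.g. `SimpleRetryErrors(BadRequest, error_predicate=_still_in_use)(dataset.delete)()`, which is why the diff can simply put `retry_409` / `retry_in_use` in front of the existing delete calls without otherwise changing the cleanup loop.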