From 17c2ec64fb2e963c87d21700b76b6f232485d926 Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Fri, 16 Sep 2016 13:42:03 -0400 Subject: [PATCH 1/2] Retry 400s during dataset teardown. Closes #2318. --- system_tests/bigquery.py | 12 +++++++++--- 1 file changed, 9 insertions(+), 3 deletions(-) diff --git a/system_tests/bigquery.py b/system_tests/bigquery.py index 6a3d8b22e913..a0146f564bf5 100644 --- a/system_tests/bigquery.py +++ b/system_tests/bigquery.py @@ -17,7 +17,6 @@ import unittest from google.cloud import bigquery -from google.cloud.exceptions import Conflict from google.cloud.exceptions import Forbidden from retry import RetryErrors @@ -35,6 +34,7 @@ def _rate_limit_exceeded(forbidden): return any(error['reason'] == 'rateLimitExceeded' for error in forbidden._errors) + # We need to wait to stay within the rate limits. # The alternative outcome is a 403 Forbidden response from upstream, which # they return instead of the more appropriate 429. @@ -61,11 +61,17 @@ def setUp(self): self.to_delete = [] def tearDown(self): + from google.cloud.bigquery.dataset import Dataset from google.cloud.storage import Bucket + from google.cloud.exceptions import BadRequest + from google.cloud.exceptions import Conflict + retry_400 = RetryErrors(BadRequest) + retry_409 = RetryErrors(Conflict) for doomed in self.to_delete: if isinstance(doomed, Bucket): - retry = RetryErrors(Conflict) - retry(doomed.delete)(force=True) + retry_409(doomed.delete)(force=True) + elif isinstance(doomed, Dataset): + retry_400(doomed.delete)() else: doomed.delete() From 0ac2ea5bfa14a3b768ea12069f16894eb2fc4dc8 Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Fri, 16 Sep 2016 16:14:20 -0400 Subject: [PATCH 2/2] Add error predicate. 
Addresses: https://github.com/GoogleCloudPlatform/google-cloud-python/pull/2333#issuecomment-247684422 --- system_tests/bigquery.py | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/system_tests/bigquery.py b/system_tests/bigquery.py index a0146f564bf5..b5370efec96f 100644 --- a/system_tests/bigquery.py +++ b/system_tests/bigquery.py @@ -65,13 +65,18 @@ def tearDown(self): from google.cloud.storage import Bucket from google.cloud.exceptions import BadRequest from google.cloud.exceptions import Conflict - retry_400 = RetryErrors(BadRequest) + + def _still_in_use(bad_request): + return any(error['reason'] == 'resourceInUse' + for error in bad_request._errors) + + retry_in_use = RetryErrors(BadRequest, error_predicate=_still_in_use) retry_409 = RetryErrors(Conflict) for doomed in self.to_delete: if isinstance(doomed, Bucket): retry_409(doomed.delete)(force=True) elif isinstance(doomed, Dataset): - retry_400(doomed.delete)() + retry_in_use(doomed.delete)() else: doomed.delete()