forked from googleapis/python-bigquery
-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathretry.py
More file actions
101 lines (78 loc) · 3.17 KB
/
retry.py
File metadata and controls
101 lines (78 loc) · 3.17 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
# Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from google.api_core import exceptions
from google.api_core import retry, retry_async
from google.auth import exceptions as auth_exceptions # type: ignore
import requests.exceptions
# Structured-error "reason" codes that indicate a transient, retryable
# condition. Checked by the retry predicates below against the first entry
# of an exception's ``errors`` list.
_RETRYABLE_REASONS = frozenset(
    ["rateLimitExceeded", "backendError", "internalError", "badGateway"]
)
# Exception types that can be raised without a structured ``errors`` payload
# (e.g. by the Google frontend or the HTTP transport layer) but are still
# treated as transient and safe to retry.
_UNSTRUCTURED_RETRYABLE_TYPES = (
    ConnectionError,
    exceptions.TooManyRequests,
    exceptions.InternalServerError,
    exceptions.BadGateway,
    exceptions.ServiceUnavailable,
    requests.exceptions.ChunkedEncodingError,
    requests.exceptions.ConnectionError,
    requests.exceptions.Timeout,
    auth_exceptions.TransportError,
)
# Overall deadline (seconds) across all retry attempts of a single API request.
_DEFAULT_RETRY_DEADLINE = 10.0 * 60.0  # 10 minutes
# Allow for a few retries after the API request times out. This is relevant
# for rateLimitExceeded errors, which can be raised either by the Google load
# balancer or the BigQuery job server.
_DEFAULT_JOB_DEADLINE = 3.0 * _DEFAULT_RETRY_DEADLINE
def _should_retry(exc):
    """Predicate for determining when to retry an API request.

    Retries when the exception carries a structured error whose first
    ``reason`` is in ``_RETRYABLE_REASONS``, or — when no structured error
    details are present (e.g. a raw GFE or transport failure) — when the
    exception itself is one of ``_UNSTRUCTURED_RETRYABLE_TYPES``.
    """
    errors = getattr(exc, "errors", ())
    if len(errors) == 0:
        # No structured error payload: fall back to matching on the
        # exception type alone.
        return isinstance(exc, _UNSTRUCTURED_RETRYABLE_TYPES)
    return errors[0]["reason"] in _RETRYABLE_REASONS
# Retries requests whose failure matches _should_retry, for up to
# _DEFAULT_RETRY_DEADLINE seconds overall.
# NOTE(review): ``deadline`` is a deprecated alias of ``timeout`` in
# google-api-core (the async constructors below say as much) — consider
# migrating once the supported google-api-core floor allows it.
DEFAULT_RETRY = retry.Retry(predicate=_should_retry, deadline=_DEFAULT_RETRY_DEADLINE)
"""The default retry object.
Any method with a ``retry`` parameter will be retried automatically,
with reasonable defaults. To disable retry, pass ``retry=None``.
To modify the default retry behavior, call a ``with_XXX`` method
on ``DEFAULT_RETRY``. For example, to change the deadline to 30 seconds,
pass ``retry=bigquery.DEFAULT_RETRY.with_deadline(30)``.
"""
# ``None`` here defers to the transport's behavior; the retry object above
# bounds the total wait instead.
DEFAULT_TIMEOUT = None
"""The default API timeout.
This is the time to wait per request. To adjust the total wait time, set a
deadline on the retry object.
"""
job_retry_reasons = "rateLimitExceeded", "backendError", "jobRateLimitExceeded"
def _job_should_retry(exc):
if not hasattr(exc, "errors") or len(exc.errors) == 0:
return False
reason = exc.errors[0]["reason"]
return reason in job_retry_reasons
# Retries failed jobs (predicate: _job_should_retry) for up to
# _DEFAULT_JOB_DEADLINE seconds — three times the per-request deadline, to
# ride out rateLimitExceeded responses (see _DEFAULT_JOB_DEADLINE above).
DEFAULT_JOB_RETRY = retry.Retry(
    predicate=_job_should_retry, deadline=_DEFAULT_JOB_DEADLINE
)
"""
The default job retry object.
"""
# Async counterpart of DEFAULT_RETRY: same predicate and overall deadline,
# for use with awaitable API calls.
DEFAULT_ASYNC_RETRY = retry_async.AsyncRetry(
    predicate=_should_retry, deadline=_DEFAULT_RETRY_DEADLINE
) # deadline is deprecated
# Async counterpart of DEFAULT_JOB_RETRY: same job-level predicate and
# extended deadline.
DEFAULT_ASYNC_JOB_RETRY = retry_async.AsyncRetry(
    predicate=_job_should_retry,
    deadline=_DEFAULT_JOB_DEADLINE, # deadline is deprecated
)