# Copyright 2013 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
# either express or implied. See the License for the specific
# language governing permissions and limitations under the License.

"""Util functions and classes for cloudstorage_api."""


__all__ = ['set_default_retry_params',
           'RetryParams',
          ]

import copy
import httplib
import logging
import math
import os
import threading
import time
import urllib

try:
  from google.appengine.api import urlfetch
  from google.appengine.datastore import datastore_rpc
  from google.appengine.ext.ndb import eventloop
  from google.appengine.ext.ndb import utils
  from google.appengine import runtime
  from google.appengine.runtime import apiproxy_errors
except ImportError:
  from google.appengine.api import urlfetch
  from google.appengine.datastore import datastore_rpc
  from google.appengine import runtime
  from google.appengine.runtime import apiproxy_errors
  from google.appengine.ext.ndb import eventloop
  from google.appengine.ext.ndb import utils


_RETRIABLE_EXCEPTIONS = (urlfetch.DownloadError,
                         apiproxy_errors.Error)

_thread_local_settings = threading.local()
_thread_local_settings.default_retry_params = None


def set_default_retry_params(retry_params):
  """Set a default RetryParams for the current request and current thread."""
  _thread_local_settings.default_retry_params = copy.copy(retry_params)


def _get_default_retry_params():
  """Get the default RetryParams for the current request and current thread.

  Returns:
    A new instance of the default RetryParams.
  """
  default = getattr(_thread_local_settings, 'default_retry_params', None)
  if default is None or not default.belong_to_current_request():
    return RetryParams()
  else:
    return copy.copy(default)


def _quote_filename(filename):
  """Quotes a filename for use as a valid URI path.

  Args:
    filename: user provided filename, e.g. /bucket/filename.

  Returns:
    The filename properly quoted for use as a URI path component.
  """
  return urllib.quote(filename)


def _unquote_filename(filename):
  """Unquotes a valid URI path back to its filename.

  This is the opposite of _quote_filename.

  Args:
    filename: a quoted filename, e.g. /bucket/some%20filename.

  Returns:
    The filename unquoted.
  """
  return urllib.unquote(filename)
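

# Example sketch (illustrative only): a round trip through the two helpers
# above. The bucket and object name here are hypothetical.
def _example_quote_roundtrip():
  quoted = _quote_filename('/my-bucket/some filename')
  # quoted is now '/my-bucket/some%20filename'; unquoting restores the input.
  return _unquote_filename(quoted) == '/my-bucket/some filename'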


def _should_retry(resp):
  """Given a urlfetch response, decide whether to retry that request."""
  return (resp.status_code == httplib.REQUEST_TIMEOUT or
          (resp.status_code >= 500 and
           resp.status_code < 600))


class RetryParams(object):
  """Retry configuration parameters."""

  @datastore_rpc._positional(1)
  def __init__(self,
               backoff_factor=2.0,
               initial_delay=0.1,
               max_delay=10.0,
               min_retries=3,
               max_retries=6,
               max_retry_period=30.0,
               urlfetch_timeout=None,
               save_access_token=False):
    """Init.

    This object is unique per request per thread.

    Library will retry according to this setting when App Engine Server
    can't call urlfetch, urlfetch timed out, or urlfetch got a 408 or
    5xx response.

    Args:
      backoff_factor: exponential backoff multiplier.
      initial_delay: seconds to delay for the first retry.
      max_delay: max seconds to delay for every retry.
      min_retries: min number of times to retry. This value is automatically
        capped by max_retries.
      max_retries: max number of times to retry. Set this to 0 for no retry.
      max_retry_period: max total seconds spent on retry. Retry stops when
        this period passed AND min_retries has been attempted.
      urlfetch_timeout: timeout for urlfetch in seconds. Could be None,
        in which case the value will be chosen by urlfetch module.
      save_access_token: persist access token to datastore to avoid
        excessive usage of GetAccessToken API. Usually the token is cached
        in process and in memcache. In some cases, memcache isn't very
        reliable.
    """
    self.backoff_factor = self._check('backoff_factor', backoff_factor)
    self.initial_delay = self._check('initial_delay', initial_delay)
    self.max_delay = self._check('max_delay', max_delay)
    self.max_retry_period = self._check('max_retry_period', max_retry_period)
    self.max_retries = self._check('max_retries', max_retries, True, int)
    self.min_retries = self._check('min_retries', min_retries, True, int)
    if self.min_retries > self.max_retries:
      self.min_retries = self.max_retries

    self.urlfetch_timeout = None
    if urlfetch_timeout is not None:
      self.urlfetch_timeout = self._check('urlfetch_timeout', urlfetch_timeout)
    self.save_access_token = self._check('save_access_token',
                                         save_access_token, True, bool)

    self._request_id = os.getenv('REQUEST_LOG_ID')

  def __eq__(self, other):
    if not isinstance(other, self.__class__):
      return False
    return self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not self.__eq__(other)

  @classmethod
  def _check(cls, name, val, can_be_zero=False, val_type=float):
    """Check init arguments.

    Args:
      name: name of the argument, used for logging.
      val: value. Value has to be a non negative number.
      can_be_zero: whether value can be zero.
      val_type: Python type of the value.

    Returns:
      The value.

    Raises:
      ValueError: when an invalid value is passed in.
      TypeError: when an invalid value type is passed in.
    """
    valid_types = [val_type]
    if val_type is float:
      valid_types.append(int)

    if type(val) not in valid_types:
      raise TypeError(
          'Expect type %s for parameter %s' % (val_type.__name__, name))
    if val < 0:
      raise ValueError(
          'Value for parameter %s has to be greater than 0' % name)
    if not can_be_zero and val == 0:
      raise ValueError(
          'Value for parameter %s can not be 0' % name)
    return val

  def belong_to_current_request(self):
    return os.getenv('REQUEST_LOG_ID') == self._request_id

  def delay(self, n, start_time):
    """Calculate delay before the next retry.

    Args:
      n: the number of the current attempt. The first attempt should be 1.
      start_time: the time when retry started, in unix time.

    Returns:
      Number of seconds to wait before the next retry. -1 if retry should
      give up.
    """
    if (n > self.max_retries or
        (n > self.min_retries and
         time.time() - start_time > self.max_retry_period)):
      return -1
    return min(
        math.pow(self.backoff_factor, n - 1) * self.initial_delay,
        self.max_delay)
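

# Example sketch (illustrative only): configuring retries and inspecting the
# exponential backoff that delay() produces. The parameter values are
# arbitrary examples.
def _example_retry_params_usage():
  params = RetryParams(initial_delay=0.2,
                       backoff_factor=2.0,
                       max_delay=5.0,
                       max_retries=6,
                       max_retry_period=15.0)
  # Install as the default for the current request/thread; later cloudstorage
  # calls pick it up via _get_default_retry_params().
  set_default_retry_params(params)

  # Backoff grows as initial_delay * backoff_factor ** (n - 1), capped at
  # max_delay: attempts 1..5 yield 0.2, 0.4, 0.8, 1.6, 3.2 seconds here.
  start = time.time()
  return [params.delay(n, start) for n in (1, 2, 3, 4, 5)]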


def _retry_fetch(url, retry_params, **kwds):
  """A blocking fetch function similar to urlfetch.fetch.

  This function should be used when a urlfetch has timed out or the response
  shows http request timeout. This function will put the current thread to
  sleep between retry backoffs.

  Args:
    url: url to fetch.
    retry_params: an instance of RetryParams.
    **kwds: keyword arguments for urlfetch. If deadline is specified in kwds,
      it precedes the one in RetryParams. If none is specified, it's up to
      urlfetch to use its own default.

  Returns:
    A urlfetch response from the last retry. None if no retry was attempted.

  Raises:
    Whatever exception was encountered during the last retry.
  """
  n = 1
  start_time = time.time()
  delay = retry_params.delay(n, start_time)
  if delay <= 0:
    return

  logging.info('Will retry request to %s.', url)
  while delay > 0:
    resp = None
    logging.info('Retry in %s seconds.', delay)
    time.sleep(delay)

    try:
      resp = urlfetch.fetch(url, **kwds)
    except runtime.DeadlineExceededError:
      logging.info('Urlfetch retry %s will exceed request deadline '
                   'after %s seconds total', n, time.time() - start_time)
      raise
    except _RETRIABLE_EXCEPTIONS, e:
      pass

    n += 1
    delay = retry_params.delay(n, start_time)
    if resp and not _should_retry(resp):
      return resp
    elif resp:
      logging.info('Got status %s from GCS.', resp.status_code)
    else:
      logging.info('Got exception "%r" while contacting GCS.', e)

  if resp:
    return resp

  logging.info('Urlfetch failed after %s retries and %s seconds in total.',
               n - 1, time.time() - start_time)
  raise
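

# Example sketch (illustrative only): how the helper above might be driven
# directly. The URL and deadline are hypothetical; real callers reach this
# path through the cloudstorage API after a first fetch has already timed out
# or returned a retriable status.
def _example_retry_fetch_usage():
  params = RetryParams(max_retries=2, max_retry_period=5.0)
  # A deadline passed through **kwds takes precedence over
  # params.urlfetch_timeout for each individual urlfetch.fetch call.
  return _retry_fetch('https://storage.googleapis.com/my-bucket/my-object',
                      params, deadline=10)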


def _run_until_rpc():
  """Eagerly evaluate tasklets until one is blocking on some RPC.

  Usually the ndb eventloop el isn't run until some code calls
  future.get_result().

  When an async tasklet is called, the tasklet wrapper evaluates the tasklet
  code into a generator, enqueues a callback _help_tasklet_along onto
  the el.current queue, and returns a future.

  _help_tasklet_along, when called by the el, will get one yielded value from
  the generator. If the value is another future, it sets up a callback
  _on_future_complete to invoke _help_tasklet_along when the dependent future
  fulfills. If the value is an RPC, it sets up a callback _on_rpc_complete to
  invoke _help_tasklet_along when the RPC fulfills. Thus _help_tasklet_along
  drills down the chain of futures until some future is blocked by an RPC.
  The el runs all callbacks and constantly checks pending RPC status.
  """
  el = eventloop.get_event_loop()
  while el.current:
    el.run0()


def _eager_tasklet(tasklet):
  """Decorator to turn a tasklet into one that runs eagerly."""

  @utils.wrapping(tasklet)
  def eager_wrapper(*args, **kwds):
    fut = tasklet(*args, **kwds)
    _run_until_rpc()
    return fut

  return eager_wrapper
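

# Example sketch (illustrative only): applying the decorator above to a
# hypothetical ndb tasklet so its underlying RPC is issued eagerly, before
# anyone calls get_result() on the returned future.
def _example_eager_tasklet_usage(key):
  from google.appengine.ext import ndb

  @_eager_tasklet
  @ndb.tasklet
  def fetch_entity_async(entity_key):
    entity = yield entity_key.get_async()
    raise ndb.Return(entity)

  # The RPC behind get_async() is already in flight when this future returns.
  return fetch_entity_async(key)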