# Copyright 2014 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""Classes to encapsulate a single HTTP request.

The classes implement a command pattern, with every
object supporting an execute() method that does the
actual HTTP request.
"""
from __future__ import absolute_import
import six
from six.moves import http_client
from six.moves import range

__author__ = 'jcgregorio@google.com (Joe Gregorio)'

from six import BytesIO, StringIO
from six.moves.urllib.parse import urlparse, urlunparse, quote, unquote

import base64
import copy
import gzip
import httplib2
import json
import logging
import mimetypes
import os
import random
import socket
import ssl
import sys
import time
import uuid

from email.generator import Generator
from email.mime.multipart import MIMEMultipart
from email.mime.nonmultipart import MIMENonMultipart
from email.parser import FeedParser

from googleapiclient import mimeparse
from googleapiclient.errors import BatchError
from googleapiclient.errors import HttpError
from googleapiclient.errors import InvalidChunkSizeError
from googleapiclient.errors import ResumableUploadError
from googleapiclient.errors import UnexpectedBodyError
from googleapiclient.errors import UnexpectedMethodError
from googleapiclient.model import JsonModel
from oauth2client import util


LOGGER = logging.getLogger(__name__)

DEFAULT_CHUNK_SIZE = 512*1024

MAX_URI_LENGTH = 2048

_TOO_MANY_REQUESTS = 429


def _should_retry_response(resp_status, content):
  """Determines whether a response should be retried.

  Args:
    resp_status: The response status received.
    content: The response content body.

  Returns:
    True if the response should be retried, otherwise False.
  """
  # Retry on 5xx errors.
  if resp_status >= 500:
    return True

  # Retry on 429 errors.
  if resp_status == _TOO_MANY_REQUESTS:
    return True

  # For 403 errors, we have to check for the `reason` in the response to
  # determine if we should retry.
  if resp_status == six.moves.http_client.FORBIDDEN:
    # If there are no details about the 403 type, don't retry.
    if not content:
      return False

    # Content is in JSON format.
    try:
      data = json.loads(content.decode('utf-8'))
      reason = data['error']['errors'][0]['reason']
    except (UnicodeDecodeError, ValueError, KeyError):
      LOGGER.warning('Invalid JSON content from response: %s', content)
      return False

    LOGGER.warning('Encountered 403 Forbidden with reason "%s"', reason)

    # Only retry on rate limit related failures.
    if reason in ('userRateLimitExceeded', 'rateLimitExceeded', ):
      return True

  # Everything else is a success or non-retriable, so don't retry.
  return False
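

# Illustrative sketch (documentation only, not part of the library API): a few
# hypothetical calls against the retry predicate above. The 403 payload shape
# mirrors the fields parsed in _should_retry_response and is an assumption.
def _example_should_retry_response():
  rate_limited = json.dumps(
      {'error': {'errors': [{'reason': 'rateLimitExceeded'}]}}).encode('utf-8')
  assert _should_retry_response(503, b'') is True
  assert _should_retry_response(_TOO_MANY_REQUESTS, b'') is True
  assert _should_retry_response(403, rate_limited) is True
  assert _should_retry_response(404, b'') is False

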
def _retry_request(http, num_retries, req_type, sleep, rand, uri, method, *args,
                   **kwargs):
  """Retries an HTTP request multiple times while handling errors.

  If after all retries the request still fails, the last error is either
  returned as the return value (for HTTP 5xx errors) or raised (for
  ssl.SSLError and socket timeouts).

  Args:
    http: Http object to be used to execute request.
    num_retries: Maximum number of retries.
    req_type: Type of the request (used for logging retries).
    sleep, rand: Functions to sleep for random time between retries.
    uri: URI to be requested.
    method: HTTP method to be used.
    args, kwargs: Additional arguments passed to http.request.

  Returns:
    resp, content - Response from the http request (may be HTTP 5xx).
  """
  resp = None
  content = None
  for retry_num in range(num_retries + 1):
    if retry_num > 0:
      # Sleep before retrying.
      sleep_time = rand() * 2 ** retry_num
      LOGGER.warning(
          'Sleeping %.2f seconds before retry %d of %d for %s: %s %s, after %s',
          sleep_time, retry_num, num_retries, req_type, method, uri,
          resp.status if resp else exception)
      sleep(sleep_time)

    try:
      exception = None
      resp, content = http.request(uri, method, *args, **kwargs)
    # Retry on SSL errors and socket timeout errors.
    except ssl.SSLError as ssl_error:
      exception = ssl_error
    except socket.error as socket_error:
      # errno's contents differ by platform, so we have to match by name.
      if socket.errno.errorcode.get(socket_error.errno) not in (
          'WSAETIMEDOUT', 'ETIMEDOUT', ):
        raise
      exception = socket_error

    if exception:
      if retry_num == num_retries:
        raise exception
      else:
        continue

    if not _should_retry_response(resp.status, content):
      break

  return resp, content
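

# Illustrative sketch (documentation only): driving _retry_request with the
# HttpMockSequence class defined later in this module. The URI is a
# placeholder, and sleep is stubbed out so the example runs instantly.
def _example_retry_request():
  http = HttpMockSequence([
      ({'status': '500'}, ''),
      ({'status': '200'}, 'ok'),
  ])
  resp, content = _retry_request(
      http, 1, 'example request', lambda _seconds: None, random.random,
      'http://example.com', 'GET')
  assert resp.status == 200

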
class MediaUploadProgress(object):
  """Status of a resumable upload."""

  def __init__(self, resumable_progress, total_size):
    """Constructor.

    Args:
      resumable_progress: int, bytes sent so far.
      total_size: int, total bytes in complete upload, or None if the total
        upload size isn't known ahead of time.
    """
    self.resumable_progress = resumable_progress
    self.total_size = total_size

  def progress(self):
    """Percent of upload completed, as a float.

    Returns:
      the percentage complete as a float, returning 0.0 if the total size of
      the upload is unknown.
    """
    if self.total_size is not None:
      return float(self.resumable_progress) / float(self.total_size)
    else:
      return 0.0


class MediaDownloadProgress(object):
  """Status of a resumable download."""

  def __init__(self, resumable_progress, total_size):
    """Constructor.

    Args:
      resumable_progress: int, bytes received so far.
      total_size: int, total bytes in complete download.
    """
    self.resumable_progress = resumable_progress
    self.total_size = total_size

  def progress(self):
    """Percent of download completed, as a float.

    Returns:
      the percentage complete as a float, returning 0.0 if the total size of
      the download is unknown.
    """
    if self.total_size is not None:
      return float(self.resumable_progress) / float(self.total_size)
    else:
      return 0.0
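

# Illustrative sketch (documentation only): progress() is a fraction in
# [0.0, 1.0], and falls back to 0.0 when the total size is unknown.
def _example_progress():
  assert MediaUploadProgress(256 * 1024, 1024 * 1024).progress() == 0.25
  assert MediaDownloadProgress(1024, None).progress() == 0.0

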
class MediaUpload(object):
  """Describes a media object to upload.

  Base class that defines the interface of MediaUpload subclasses.

  Note that subclasses of MediaUpload may allow you to control the chunksize
  when uploading a media object. It is important to keep the size of the chunk
  as large as possible to keep the upload efficient. Other factors may influence
  the size of the chunk you use, particularly if you are working in an
  environment where individual HTTP requests may have a hardcoded time limit,
  such as under certain classes of requests under Google App Engine.

  Streams are io.Base compatible objects that support seek(). Some MediaUpload
  subclasses support using streams directly to upload data. Support for
  streaming may be indicated by a MediaUpload sub-class and if appropriate for a
  platform that stream will be used for uploading the media object. The support
  for streaming is indicated by has_stream() returning True. The stream() method
  should return an io.Base object that supports seek(). On platforms where the
  underlying httplib module supports streaming, for example Python 2.6 and
  later, the stream will be passed into the http library which will result in
  less memory being used and possibly faster uploads.

  If you need to upload media that can't be uploaded using any of the existing
  MediaUpload sub-classes then you can sub-class MediaUpload for your particular
  needs.
  """

  def chunksize(self):
    """Chunk size for resumable uploads.

    Returns:
      Chunk size in bytes.
    """
    raise NotImplementedError()

  def mimetype(self):
    """Mime type of the body.

    Returns:
      Mime type.
    """
    return 'application/octet-stream'

  def size(self):
    """Size of upload.

    Returns:
      Size of the body, or None if the size is unknown.
    """
    return None

  def resumable(self):
    """Whether this upload is resumable.

    Returns:
      True if resumable upload or False.
    """
    return False

  def getbytes(self, begin, length):
    """Get bytes from the media.

    Args:
      begin: int, offset from beginning of file.
      length: int, number of bytes to read, starting at begin.

    Returns:
      A string of bytes read. May be shorter than length if EOF was reached
      first.
    """
    raise NotImplementedError()

  def has_stream(self):
    """Does the underlying upload support a streaming interface.

    Streaming means it is an io.IOBase subclass that supports seek, i.e.
    seekable() returns True.

    Returns:
      True if the call to stream() will return an instance of a seekable io.Base
      subclass.
    """
    return False

  def stream(self):
    """A stream interface to the data being uploaded.

    Returns:
      The returned value is an io.IOBase subclass that supports seek, i.e.
      seekable() returns True.
    """
    raise NotImplementedError()

  @util.positional(1)
  def _to_json(self, strip=None):
    """Utility function for creating a JSON representation of a MediaUpload.

    Args:
      strip: array, An array of names of members to not include in the JSON.

    Returns:
      string, a JSON representation of this instance, suitable to pass to
      from_json().
    """
    t = type(self)
    d = copy.copy(self.__dict__)
    if strip is not None:
      for member in strip:
        del d[member]
    d['_class'] = t.__name__
    d['_module'] = t.__module__
    return json.dumps(d)

  def to_json(self):
    """Create a JSON representation of an instance of MediaUpload.

    Returns:
      string, a JSON representation of this instance, suitable to pass to
      from_json().
    """
    return self._to_json()

  @classmethod
  def new_from_json(cls, s):
    """Utility class method to instantiate a MediaUpload subclass from a JSON
    representation produced by to_json().

    Args:
      s: string, JSON from to_json().

    Returns:
      An instance of the subclass of MediaUpload that was serialized with
      to_json().
    """
    data = json.loads(s)
    # Find and call the right classmethod from_json() to restore the object.
    module = data['_module']
    m = __import__(module, fromlist=module.split('.')[:-1])
    kls = getattr(m, data['_class'])
    from_json = getattr(kls, 'from_json')
    return from_json(s)


class MediaIoBaseUpload(MediaUpload):
  """A MediaUpload for io.Base objects.

  Note that the Python file object is compatible with io.Base and can be used
  with this class also.

    fh = BytesIO(b'...Some data to upload...')
    media = MediaIoBaseUpload(fh, mimetype='image/png',
      chunksize=1024*1024, resumable=True)
    farm.animals().insert(
        id='cow',
        name='cow.png',
        media_body=media).execute()

  Depending on the platform you are working on, you may pass -1 as the
  chunksize, which indicates that the entire file should be uploaded in a single
  request. If the underlying platform supports streams, such as Python 2.6 or
  later, then this can be very efficient as it avoids multiple connections, and
  also avoids loading the entire file into memory before sending it. Note that
  Google App Engine has a 5MB limit on request size, so you should never set
  your chunksize larger than 5MB, or to -1.
  """

  @util.positional(3)
  def __init__(self, fd, mimetype, chunksize=DEFAULT_CHUNK_SIZE,
               resumable=False):
    """Constructor.

    Args:
      fd: io.Base or file object, The source of the bytes to upload. MUST be
        opened in blocking mode, do not use streams opened in non-blocking mode.
        The given stream must be seekable, that is, it must be able to call
        seek() on fd.
      mimetype: string, Mime-type of the file.
      chunksize: int, File will be uploaded in chunks of this many bytes. Only
        used if resumable=True. Pass in a value of -1 if the file is to be
        uploaded as a single chunk. Note that Google App Engine has a 5MB limit
        on request size, so you should never set your chunksize larger than 5MB,
        or to -1.
      resumable: bool, True if this is a resumable upload. False means upload
        in a single request.
    """
    super(MediaIoBaseUpload, self).__init__()
    self._fd = fd
    self._mimetype = mimetype
    if not (chunksize == -1 or chunksize > 0):
      raise InvalidChunkSizeError()
    self._chunksize = chunksize
    self._resumable = resumable

    self._fd.seek(0, os.SEEK_END)
    self._size = self._fd.tell()

  def chunksize(self):
    """Chunk size for resumable uploads.

    Returns:
      Chunk size in bytes.
    """
    return self._chunksize

  def mimetype(self):
    """Mime type of the body.

    Returns:
      Mime type.
    """
    return self._mimetype

  def size(self):
    """Size of upload.

    Returns:
      Size of the body, or None if the size is unknown.
    """
    return self._size

  def resumable(self):
    """Whether this upload is resumable.

    Returns:
      True if resumable upload or False.
    """
    return self._resumable

  def getbytes(self, begin, length):
    """Get bytes from the media.

    Args:
      begin: int, offset from beginning of file.
      length: int, number of bytes to read, starting at begin.

    Returns:
      A string of bytes read. May be shorter than length if EOF was reached
      first.
    """
    self._fd.seek(begin)
    return self._fd.read(length)

  def has_stream(self):
    """Does the underlying upload support a streaming interface.

    Streaming means it is an io.IOBase subclass that supports seek, i.e.
    seekable() returns True.

    Returns:
      True if the call to stream() will return an instance of a seekable io.Base
      subclass.
    """
    return True

  def stream(self):
    """A stream interface to the data being uploaded.

    Returns:
      The returned value is an io.IOBase subclass that supports seek, i.e.
      seekable() returns True.
    """
    return self._fd

  def to_json(self):
    """This upload type is not serializable."""
    raise NotImplementedError('MediaIoBaseUpload is not serializable.')


class MediaFileUpload(MediaIoBaseUpload):
  """A MediaUpload for a file.

  Construct a MediaFileUpload and pass as the media_body parameter of the
  method. For example, if we had a service that allowed uploading images:

    media = MediaFileUpload('cow.png', mimetype='image/png',
      chunksize=1024*1024, resumable=True)
    farm.animals().insert(
        id='cow',
        name='cow.png',
        media_body=media).execute()

  Depending on the platform you are working on, you may pass -1 as the
  chunksize, which indicates that the entire file should be uploaded in a single
  request. If the underlying platform supports streams, such as Python 2.6 or
  later, then this can be very efficient as it avoids multiple connections, and
  also avoids loading the entire file into memory before sending it. Note that
  Google App Engine has a 5MB limit on request size, so you should never set
  your chunksize larger than 5MB, or to -1.
  """

  @util.positional(2)
  def __init__(self, filename, mimetype=None, chunksize=DEFAULT_CHUNK_SIZE,
               resumable=False):
    """Constructor.

    Args:
      filename: string, Name of the file.
      mimetype: string, Mime-type of the file. If None then a mime-type will be
        guessed from the file extension.
      chunksize: int, File will be uploaded in chunks of this many bytes. Only
        used if resumable=True. Pass in a value of -1 if the file is to be
        uploaded in a single chunk. Note that Google App Engine has a 5MB limit
        on request size, so you should never set your chunksize larger than 5MB,
        or to -1.
      resumable: bool, True if this is a resumable upload. False means upload
        in a single request.
    """
    self._filename = filename
    fd = open(self._filename, 'rb')
    if mimetype is None:
      # No mimetype provided, make a guess.
      mimetype, _ = mimetypes.guess_type(filename)
      if mimetype is None:
        # Guess failed, use octet-stream.
        mimetype = 'application/octet-stream'
    super(MediaFileUpload, self).__init__(fd, mimetype, chunksize=chunksize,
                                          resumable=resumable)

  def to_json(self):
    """Create a JSON representation of an instance of MediaFileUpload.

    Returns:
      string, a JSON representation of this instance, suitable to pass to
      from_json().
    """
    return self._to_json(strip=['_fd'])

  @staticmethod
  def from_json(s):
    d = json.loads(s)
    return MediaFileUpload(d['_filename'], mimetype=d['_mimetype'],
                           chunksize=d['_chunksize'], resumable=d['_resumable'])


class MediaInMemoryUpload(MediaIoBaseUpload):
  """MediaUpload for a chunk of bytes.

  DEPRECATED: Use MediaIoBaseUpload with either io.TextIOBase or StringIO for
  the stream.
  """

  @util.positional(2)
  def __init__(self, body, mimetype='application/octet-stream',
               chunksize=DEFAULT_CHUNK_SIZE, resumable=False):
    """Create a new MediaInMemoryUpload.

    DEPRECATED: Use MediaIoBaseUpload with either io.TextIOBase or StringIO for
    the stream.

    Args:
      body: string, Bytes of body content.
      mimetype: string, Mime-type of the file or default of
        'application/octet-stream'.
      chunksize: int, File will be uploaded in chunks of this many bytes. Only
        used if resumable=True.
      resumable: bool, True if this is a resumable upload. False means upload
        in a single request.
    """
    fd = BytesIO(body)
    super(MediaInMemoryUpload, self).__init__(fd, mimetype, chunksize=chunksize,
                                              resumable=resumable)
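

# Illustrative sketch (documentation only): the non-deprecated equivalent of
# MediaInMemoryUpload is to wrap the bytes yourself and use MediaIoBaseUpload.
def _example_in_memory_upload():
  body = b'...Some data to upload...'
  media = MediaIoBaseUpload(BytesIO(body), mimetype='application/octet-stream',
                            chunksize=DEFAULT_CHUNK_SIZE, resumable=True)
  assert media.size() == len(body)

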
class MediaIoBaseDownload(object):
  """Download media resources.

  Note that the Python file object is compatible with io.Base and can be used
  with this class also.


  Example:
    request = farms.animals().get_media(id='cow')
    fh = io.FileIO('cow.png', mode='wb')
    downloader = MediaIoBaseDownload(fh, request, chunksize=1024*1024)

    done = False
    while done is False:
      status, done = downloader.next_chunk()
      if status:
        print "Download %d%%." % int(status.progress() * 100)
    print "Download Complete!"
  """

  @util.positional(3)
  def __init__(self, fd, request, chunksize=DEFAULT_CHUNK_SIZE):
    """Constructor.

    Args:
      fd: io.Base or file object, The stream in which to write the downloaded
        bytes.
      request: googleapiclient.http.HttpRequest, the media request to perform in
        chunks.
      chunksize: int, File will be downloaded in chunks of this many bytes.
    """
    self._fd = fd
    self._request = request
    self._uri = request.uri
    self._chunksize = chunksize
    self._progress = 0
    self._total_size = None
    self._done = False

    # Stubs for testing.
    self._sleep = time.sleep
    self._rand = random.random

  @util.positional(1)
  def next_chunk(self, num_retries=0):
    """Get the next chunk of the download.

    Args:
      num_retries: Integer, number of times to retry 500's with randomized
            exponential backoff. If all retries fail, the raised HttpError
            represents the last request. If zero (default), we attempt the
            request only once.

    Returns:
      (status, done): (MediaDownloadStatus, boolean)
         The value of 'done' will be True when the media has been fully
         downloaded.

    Raises:
      googleapiclient.errors.HttpError if the response was not a 2xx.
      httplib2.HttpLib2Error if a transport error has occurred.
    """
    headers = {
        'range': 'bytes=%d-%d' % (
            self._progress, self._progress + self._chunksize)
        }
    http = self._request.http

    resp, content = _retry_request(
        http, num_retries, 'media download', self._sleep, self._rand, self._uri,
        'GET', headers=headers)

    if resp.status in [200, 206]:
      if 'content-location' in resp and resp['content-location'] != self._uri:
        self._uri = resp['content-location']
      self._progress += len(content)
      self._fd.write(content)

      if 'content-range' in resp:
        content_range = resp['content-range']
        length = content_range.rsplit('/', 1)[1]
        self._total_size = int(length)
      elif 'content-length' in resp:
        self._total_size = int(resp['content-length'])

      if self._progress == self._total_size:
        self._done = True
      return MediaDownloadProgress(self._progress, self._total_size), self._done
    else:
      raise HttpError(resp, content, uri=self._uri)


class _StreamSlice(object):
  """Truncated stream.

  Takes a stream and presents a stream that is a slice of the original stream.
  This is used when uploading media in chunks. In later versions of Python a
  stream can be passed to httplib in place of the string of data to send. The
  problem is that httplib just blindly reads to the end of the stream. This
  wrapper presents a virtual stream that only reads to the end of the chunk.
  """

  def __init__(self, stream, begin, chunksize):
    """Constructor.

    Args:
      stream: (io.Base, file object), the stream to wrap.
      begin: int, the seek position the chunk begins at.
      chunksize: int, the size of the chunk.
    """
    self._stream = stream
    self._begin = begin
    self._chunksize = chunksize
    self._stream.seek(begin)

  def read(self, n=-1):
    """Read n bytes.

    Args:
      n, int, the number of bytes to read.

    Returns:
      A string of length 'n', or less if EOF is reached.
    """
    # The data left available to read sits in [cur, end)
    cur = self._stream.tell()
    end = self._begin + self._chunksize
    if n == -1 or cur + n > end:
      n = end - cur
    return self._stream.read(n)
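

# Illustrative sketch (documentation only): _StreamSlice exposes a single chunk
# of a larger stream, which is how chunked resumable uploads bound the request
# body handed to httplib.
def _example_stream_slice():
  stream = BytesIO(b'abcdefghij')
  chunk = _StreamSlice(stream, 4, 3)
  assert chunk.read() == b'efg'
  assert chunk.read() == b''

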
class HttpRequest(object):
  """Encapsulates a single HTTP request."""

  @util.positional(4)
  def __init__(self, http, postproc, uri,
               method='GET',
               body=None,
               headers=None,
               methodId=None,
               resumable=None):
    """Constructor for an HttpRequest.

    Args:
      http: httplib2.Http, the transport object to use to make a request
      postproc: callable, called on the HTTP response and content to transform
                it into a data object before returning, or raising an exception
                on an error.
      uri: string, the absolute URI to send the request to
      method: string, the HTTP method to use
      body: string, the request body of the HTTP request,
      headers: dict, the HTTP request headers
      methodId: string, a unique identifier for the API method being called.
      resumable: MediaUpload, None if this is not a resumable request.
    """
    self.uri = uri
    self.method = method
    self.body = body
    self.headers = headers or {}
    self.methodId = methodId
    self.http = http
    self.postproc = postproc
    self.resumable = resumable
    self.response_callbacks = []
    self._in_error_state = False

    # Pull the multipart boundary out of the content-type header.
    major, minor, params = mimeparse.parse_mime_type(
        self.headers.get('content-type', 'application/json'))

    # The size of the non-media part of the request.
    self.body_size = len(self.body or '')

    # The resumable URI to send chunks to.
    self.resumable_uri = None

    # The bytes that have been uploaded.
    self.resumable_progress = 0

    # Stubs for testing.
    self._rand = random.random
    self._sleep = time.sleep

  @util.positional(1)
  def execute(self, http=None, num_retries=0):
    """Execute the request.

    Args:
      http: httplib2.Http, an http object to be used in place of the
            one the HttpRequest request object was constructed with.
      num_retries: Integer, number of times to retry 500's with randomized
            exponential backoff. If all retries fail, the raised HttpError
            represents the last request. If zero (default), we attempt the
            request only once.

    Returns:
      A deserialized object model of the response body as determined
      by the postproc.

    Raises:
      googleapiclient.errors.HttpError if the response was not a 2xx.
      httplib2.HttpLib2Error if a transport error has occurred.
    """
    if http is None:
      http = self.http

    if self.resumable:
      body = None
      while body is None:
        _, body = self.next_chunk(http=http, num_retries=num_retries)
      return body

    # Non-resumable case.

    if 'content-length' not in self.headers:
      self.headers['content-length'] = str(self.body_size)
    # If the request URI is too long then turn it into a POST request.
    if len(self.uri) > MAX_URI_LENGTH and self.method == 'GET':
      self.method = 'POST'
      self.headers['x-http-method-override'] = 'GET'
      self.headers['content-type'] = 'application/x-www-form-urlencoded'
      parsed = urlparse(self.uri)
      self.uri = urlunparse(
          (parsed.scheme, parsed.netloc, parsed.path, parsed.params, None,
           None)
          )
      self.body = parsed.query
      self.headers['content-length'] = str(len(self.body))

    # Handle retries for server-side errors.
    resp, content = _retry_request(
        http, num_retries, 'request', self._sleep, self._rand, str(self.uri),
        method=str(self.method), body=self.body, headers=self.headers)

    for callback in self.response_callbacks:
      callback(resp)
    if resp.status >= 300:
      raise HttpError(resp, content, uri=self.uri)
    return self.postproc(resp, content)

  @util.positional(2)
  def add_response_callback(self, cb):
    """add_response_headers_callback

    Args:
      cb: Callback to be called on receiving the response headers, of signature:

        def cb(resp):
          # Where resp is an instance of httplib2.Response
    """
    self.response_callbacks.append(cb)

  @util.positional(1)
  def next_chunk(self, http=None, num_retries=0):
    """Execute the next step of a resumable upload.

    Can only be used if the method being executed supports media uploads and
    the MediaUpload object passed in was flagged as using resumable upload.

    Example:

      media = MediaFileUpload('cow.png', mimetype='image/png',
                              chunksize=1000, resumable=True)
      request = farm.animals().insert(
          id='cow',
          name='cow.png',
          media_body=media)

      response = None
      while response is None:
        status, response = request.next_chunk()
        if status:
          print "Upload %d%% complete." % int(status.progress() * 100)


    Args:
      http: httplib2.Http, an http object to be used in place of the
            one the HttpRequest request object was constructed with.
      num_retries: Integer, number of times to retry 500's with randomized
            exponential backoff. If all retries fail, the raised HttpError
            represents the last request. If zero (default), we attempt the
            request only once.

    Returns:
      (status, body): (ResumableMediaStatus, object)
         The body will be None until the resumable media is fully uploaded.

    Raises:
      googleapiclient.errors.HttpError if the response was not a 2xx.
      httplib2.HttpLib2Error if a transport error has occurred.
    """
    if http is None:
      http = self.http

    if self.resumable.size() is None:
      size = '*'
    else:
      size = str(self.resumable.size())

    if self.resumable_uri is None:
      start_headers = copy.copy(self.headers)
      start_headers['X-Upload-Content-Type'] = self.resumable.mimetype()
      if size != '*':
        start_headers['X-Upload-Content-Length'] = size
      start_headers['content-length'] = str(self.body_size)

      resp, content = _retry_request(
          http, num_retries, 'resumable URI request', self._sleep, self._rand,
          self.uri, method=self.method, body=self.body, headers=start_headers)

      if resp.status == 200 and 'location' in resp:
        self.resumable_uri = resp['location']
      else:
        raise ResumableUploadError(resp, content)
    elif self._in_error_state:
      # If we are in an error state then query the server for current state of
      # the upload by sending an empty PUT and reading the 'range' header in
      # the response.
      headers = {
          'Content-Range': 'bytes */%s' % size,
          'content-length': '0'
          }
      resp, content = http.request(self.resumable_uri, 'PUT',
                                   headers=headers)
      status, body = self._process_response(resp, content)
      if body:
        # The upload was complete.
        return (status, body)

    if self.resumable.has_stream():
      data = self.resumable.stream()
      if self.resumable.chunksize() == -1:
        data.seek(self.resumable_progress)
        chunk_end = self.resumable.size() - self.resumable_progress - 1
      else:
        # Doing chunking with a stream, so wrap a slice of the stream.
        data = _StreamSlice(data, self.resumable_progress,
                            self.resumable.chunksize())
        chunk_end = min(
            self.resumable_progress + self.resumable.chunksize() - 1,
            self.resumable.size() - 1)
    else:
      data = self.resumable.getbytes(
          self.resumable_progress, self.resumable.chunksize())

      # A short read implies that we are at EOF, so finish the upload.
      if len(data) < self.resumable.chunksize():
        size = str(self.resumable_progress + len(data))

      chunk_end = self.resumable_progress + len(data) - 1

    headers = {
        'Content-Range': 'bytes %d-%d/%s' % (
            self.resumable_progress, chunk_end, size),
        # Must set the content-length header here because httplib can't
        # calculate the size when working with _StreamSlice.
        'Content-Length': str(chunk_end - self.resumable_progress + 1)
        }

    for retry_num in range(num_retries + 1):
      if retry_num > 0:
        self._sleep(self._rand() * 2**retry_num)
        LOGGER.warning(
            'Retry #%d for media upload: %s %s, following status: %d'
            % (retry_num, self.method, self.uri, resp.status))

      try:
        resp, content = http.request(self.resumable_uri, method='PUT',
                                     body=data,
                                     headers=headers)
      except:
        self._in_error_state = True
        raise
      if resp.status < 500:
        break

    return self._process_response(resp, content)

  def _process_response(self, resp, content):
    """Process the response from a single chunk upload.

    Args:
      resp: httplib2.Response, the response object.
      content: string, the content of the response.

    Returns:
      (status, body): (ResumableMediaStatus, object)
         The body will be None until the resumable media is fully uploaded.

    Raises:
      googleapiclient.errors.HttpError if the response was not a 2xx or a 308.
    """
    if resp.status in [200, 201]:
      self._in_error_state = False
      return None, self.postproc(resp, content)
    elif resp.status == 308:
      self._in_error_state = False
      # A "308 Resume Incomplete" indicates we are not done.
      self.resumable_progress = int(resp['range'].split('-')[1]) + 1
      if 'location' in resp:
        self.resumable_uri = resp['location']
    else:
      self._in_error_state = True
      raise HttpError(resp, content, uri=self.uri)

    return (MediaUploadProgress(self.resumable_progress, self.resumable.size()),
            None)

  def to_json(self):
    """Returns a JSON representation of the HttpRequest."""
    d = copy.copy(self.__dict__)
    if d['resumable'] is not None:
      d['resumable'] = self.resumable.to_json()
    del d['http']
    del d['postproc']
    del d['_sleep']
    del d['_rand']

    return json.dumps(d)

  @staticmethod
  def from_json(s, http, postproc):
    """Returns an HttpRequest populated with info from a JSON object."""
    d = json.loads(s)
    if d['resumable'] is not None:
      d['resumable'] = MediaUpload.new_from_json(d['resumable'])
    return HttpRequest(
        http,
        postproc,
        uri=d['uri'],
        method=d['method'],
        body=d['body'],
        headers=d['headers'],
        methodId=d['methodId'],
        resumable=d['resumable'])
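

# Illustrative sketch (documentation only): executing an HttpRequest against
# the HttpMockSequence class defined later in this module. The URI and JSON
# body are placeholders.
def _example_http_request_execute():
  def postproc(resp, content):
    if isinstance(content, bytes):
      content = content.decode('utf-8')
    return json.loads(content)

  http = HttpMockSequence([({'status': '200'}, '{"answer": 42}')])
  request = HttpRequest(http, postproc, 'http://example.com/answer')
  assert request.execute() == {'answer': 42}

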
class BatchHttpRequest(object):
  """Batches multiple HttpRequest objects into a single HTTP request.

  Example:
    from googleapiclient.http import BatchHttpRequest

    def list_animals(request_id, response, exception):
      \"\"\"Do something with the animals list response.\"\"\"
      if exception is not None:
        # Do something with the exception.
        pass
      else:
        # Do something with the response.
        pass

    def list_farmers(request_id, response, exception):
      \"\"\"Do something with the farmers list response.\"\"\"
      if exception is not None:
        # Do something with the exception.
        pass
      else:
        # Do something with the response.
        pass

    service = build('farm', 'v2')

    batch = BatchHttpRequest()

    batch.add(service.animals().list(), list_animals)
    batch.add(service.farmers().list(), list_farmers)
    batch.execute(http=http)
  """

  @util.positional(1)
  def __init__(self, callback=None, batch_uri=None):
    """Constructor for a BatchHttpRequest.

    Args:
      callback: callable, A callback to be called for each response, of the
        form callback(id, response, exception). The first parameter is the
        request id, and the second is the deserialized response object. The
        third is a googleapiclient.errors.HttpError exception object if an HTTP
        error occurred while processing the request, or None if no error
        occurred.
      batch_uri: string, URI to send batch requests to.
    """
    if batch_uri is None:
      batch_uri = 'https://www.googleapis.com/batch'
    self._batch_uri = batch_uri

    # Global callback to be called for each individual response in the batch.
    self._callback = callback

    # A map from id to request.
    self._requests = {}

    # A map from id to callback.
    self._callbacks = {}

    # List of request ids, in the order in which they were added.
    self._order = []

    # The last auto generated id.
    self._last_auto_id = 0

    # Unique ID on which to base the Content-ID headers.
    self._base_id = None

    # A map from request id to (httplib2.Response, content) response pairs
    self._responses = {}

    # A map of id(Credentials) that have been refreshed.
    self._refreshed_credentials = {}

  def _refresh_and_apply_credentials(self, request, http):
    """Refresh the credentials and apply to the request.

    Args:
      request: HttpRequest, the request.
      http: httplib2.Http, the global http object for the batch.
    """
    # For the credentials to refresh, but only once per refresh_token
    # If there is no http per the request then refresh the http passed in
    # via execute()
    creds = None
    if request.http is not None and hasattr(request.http.request,
        'credentials'):
      creds = request.http.request.credentials
    elif http is not None and hasattr(http.request, 'credentials'):
      creds = http.request.credentials
    if creds is not None:
      if id(creds) not in self._refreshed_credentials:
        creds.refresh(http)
        self._refreshed_credentials[id(creds)] = 1

    # Only apply the credentials if we are using the http object passed in,
    # otherwise apply() will get called during _serialize_request().
    if request.http is None or not hasattr(request.http.request,
        'credentials'):
      creds.apply(request.headers)

  def _id_to_header(self, id_):
    """Convert an id to a Content-ID header value.

    Args:
      id_: string, identifier of individual request.

    Returns:
      A Content-ID header with the id_ encoded into it. A UUID is prepended to
      the value because Content-ID headers are supposed to be universally
      unique.
    """
    if self._base_id is None:
      self._base_id = uuid.uuid4()

    return '<%s+%s>' % (self._base_id, quote(id_))

  def _header_to_id(self, header):
    """Convert a Content-ID header value to an id.

    Presumes the Content-ID header conforms to the format that _id_to_header()
    returns.

    Args:
      header: string, Content-ID header value.

    Returns:
      The extracted id value.

    Raises:
      BatchError if the header is not in the expected format.
    """
    if header[0] != '<' or header[-1] != '>':
      raise BatchError("Invalid value for Content-ID: %s" % header)
    if '+' not in header:
      raise BatchError("Invalid value for Content-ID: %s" % header)
    base, id_ = header[1:-1].rsplit('+', 1)

    return unquote(id_)

  def _serialize_request(self, request):
    """Convert an HttpRequest object into a string.

    Args:
      request: HttpRequest, the request to serialize.

    Returns:
      The request as a string in application/http format.
    """
    # Construct status line
    parsed = urlparse(request.uri)
    request_line = urlunparse(
        ('', '', parsed.path, parsed.params, parsed.query, '')
        )
    status_line = request.method + ' ' + request_line + ' HTTP/1.1\n'
    major, minor = request.headers.get('content-type', 'application/json').split('/')
    msg = MIMENonMultipart(major, minor)
    headers = request.headers.copy()

    if request.http is not None and hasattr(request.http.request,
        'credentials'):
      request.http.request.credentials.apply(headers)

    # MIMENonMultipart adds its own Content-Type header.
    if 'content-type' in headers:
      del headers['content-type']

    for key, value in six.iteritems(headers):
      msg[key] = value
    msg['Host'] = parsed.netloc
    msg.set_unixfrom(None)

    if request.body is not None:
      msg.set_payload(request.body)
      msg['content-length'] = str(len(request.body))

    # Serialize the mime message.
    fp = StringIO()
    # maxheaderlen=0 means don't line wrap headers.
    g = Generator(fp, maxheaderlen=0)
    g.flatten(msg, unixfrom=False)
    body = fp.getvalue()

    return status_line + body

  def _deserialize_response(self, payload):
    """Convert string into httplib2 response and content.

    Args:
      payload: string, headers and body as a string.

    Returns:
      A pair (resp, content), such as would be returned from httplib2.request.
    """
    # Strip off the status line
    status_line, payload = payload.split('\n', 1)
    protocol, status, reason = status_line.split(' ', 2)

    # Parse the rest of the response
    parser = FeedParser()
    parser.feed(payload)
    msg = parser.close()
    msg['status'] = status

    # Create httplib2.Response from the parsed headers.
    resp = httplib2.Response(msg)
    resp.reason = reason
    resp.version = int(protocol.split('/', 1)[1].replace('.', ''))

    content = payload.split('\r\n\r\n', 1)[1]

    return resp, content

  def _new_id(self):
    """Create a new id.

    Auto incrementing number that avoids conflicts with ids already used.

    Returns:
       string, a new unique id.
    """
    self._last_auto_id += 1
    while str(self._last_auto_id) in self._requests:
      self._last_auto_id += 1
    return str(self._last_auto_id)

  @util.positional(2)
  def add(self, request, callback=None, request_id=None):
    """Add a new request.

    Every callback added will be paired with a unique id, the request_id. That
    unique id will be passed back to the callback when the response comes back
    from the server. The default behavior is to have the library generate its
    own unique id. If the caller passes in a request_id then they must ensure
    uniqueness for each request_id, and if they are not unique an exception is
    raised. Callers should either supply all request_ids or never supply a
    request id, to avoid such an error.

    Args:
      request: HttpRequest, Request to add to the batch.
      callback: callable, A callback to be called for this response, of the
        form callback(id, response, exception). The first parameter is the
        request id, and the second is the deserialized response object. The
        third is a googleapiclient.errors.HttpError exception object if an HTTP
        error occurred while processing the request, or None if no errors
        occurred.
      request_id: string, A unique id for the request. The id will be passed to
        the callback with the response.

    Returns:
      None

    Raises:
      BatchError if a media request is added to a batch.
      KeyError if the request_id is not unique.
    """
    if request_id is None:
      request_id = self._new_id()
    if request.resumable is not None:
      raise BatchError("Media requests cannot be used in a batch request.")
    if request_id in self._requests:
      raise KeyError("A request with this ID already exists: %s" % request_id)
    self._requests[request_id] = request
    self._callbacks[request_id] = callback
    self._order.append(request_id)

  def _execute(self, http, order, requests):
    """Serialize batch request, send to server, process response.

    Args:
      http: httplib2.Http, an http object to be used to make the request with.
      order: list, list of request ids in the order they were added to the
        batch.
      requests: list, list of request objects to send.

    Raises:
      httplib2.HttpLib2Error if a transport error has occurred.
      googleapiclient.errors.BatchError if the response is the wrong format.
    """
    message = MIMEMultipart('mixed')
    # Message should not write out its own headers.
    setattr(message, '_write_headers', lambda self: None)

    # Add all the individual requests.
    for request_id in order:
      request = requests[request_id]

      msg = MIMENonMultipart('application', 'http')
      msg['Content-Transfer-Encoding'] = 'binary'
      msg['Content-ID'] = self._id_to_header(request_id)

      body = self._serialize_request(request)
      msg.set_payload(body)
      message.attach(msg)

    # encode the body: note that we can't use `as_string`, because
    # it plays games with `From ` lines.
    fp = StringIO()
    g = Generator(fp, mangle_from_=False)
    g.flatten(message, unixfrom=False)
    body = fp.getvalue()

    headers = {}
    headers['content-type'] = ('multipart/mixed; '
                               'boundary="%s"') % message.get_boundary()

    resp, content = http.request(self._batch_uri, method='POST', body=body,
                                 headers=headers)

    if resp.status >= 300:
      raise HttpError(resp, content, uri=self._batch_uri)

    # Prepend with a content-type header so FeedParser can handle it.
    header = 'content-type: %s\r\n\r\n' % resp['content-type']
    # PY3's FeedParser only accepts unicode. So we should decode content
    # here, and encode each payload again.
    if six.PY3:
      content = content.decode('utf-8')
    for_parser = header + content

    parser = FeedParser()
    parser.feed(for_parser)
    mime_response = parser.close()

    if not mime_response.is_multipart():
      raise BatchError("Response not in multipart/mixed format.", resp=resp,
                       content=content)

    for part in mime_response.get_payload():
      request_id = self._header_to_id(part['Content-ID'])
      response, content = self._deserialize_response(part.get_payload())
      # We encode content here to emulate normal http response.
      if isinstance(content, six.text_type):
        content = content.encode('utf-8')
      self._responses[request_id] = (response, content)

  @util.positional(1)
  def execute(self, http=None):
    """Execute all the requests as a single batched HTTP request.

    Args:
      http: httplib2.Http, an http object to be used in place of the one the
        HttpRequest request object was constructed with. If one isn't supplied
        then use a http object from the requests in this batch.

    Returns:
      None

    Raises:
      httplib2.HttpLib2Error if a transport error has occurred.
      googleapiclient.errors.BatchError if the response is the wrong format.
    """
    # If we have no requests return
    if len(self._order) == 0:
      return None

    # If http is not supplied use the first valid one given in the requests.
    if http is None:
      for request_id in self._order:
        request = self._requests[request_id]
        if request is not None:
          http = request.http
          break

    if http is None:
      raise ValueError("Missing a valid http object.")

    self._execute(http, self._order, self._requests)

    # Loop over all the requests and check for 401s. For each 401 request the
    # credentials should be refreshed and then sent again in a separate batch.
    redo_requests = {}
    redo_order = []

    for request_id in self._order:
      resp, content = self._responses[request_id]
      if resp['status'] == '401':
        redo_order.append(request_id)
        request = self._requests[request_id]
        self._refresh_and_apply_credentials(request, http)
        redo_requests[request_id] = request

    if redo_requests:
      self._execute(http, redo_order, redo_requests)

    # Now process all callbacks that are erroring, and raise an exception for
    # ones that return a non-2xx response? Or add extra parameter to callback
    # that contains an HttpError?

    for request_id in self._order:
      resp, content = self._responses[request_id]

      request = self._requests[request_id]
      callback = self._callbacks[request_id]

      response = None
      exception = None
      try:
        if resp.status >= 300:
          raise HttpError(resp, content, uri=request.uri)
        response = request.postproc(resp, content)
      except HttpError as e:
        exception = e

      if callback is not None:
        callback(request_id, response, exception)
      if self._callback is not None:
        self._callback(request_id, response, exception)
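

# Illustrative sketch (documentation only, uses private helpers purely for
# illustration): the Content-ID helpers above are inverses of each other,
# which is what lets each individual response be matched back to the request
# that produced it.
def _example_batch_content_id():
  batch = BatchHttpRequest()
  header = batch._id_to_header('42')
  assert batch._header_to_id(header) == '42'

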
class HttpRequestMock(object):
  """Mock of HttpRequest.

  Do not construct directly, instead use RequestMockBuilder.
  """

  def __init__(self, resp, content, postproc):
    """Constructor for HttpRequestMock

    Args:
      resp: httplib2.Response, the response to emulate coming from the request
      content: string, the response body
      postproc: callable, the post processing function usually supplied by
                the model class. See model.JsonModel.response() as an example.
    """
    self.resp = resp
    self.content = content
    self.postproc = postproc
    if resp is None:
      self.resp = httplib2.Response({'status': 200, 'reason': 'OK'})
    if 'reason' in self.resp:
      self.resp.reason = self.resp['reason']

  def execute(self, http=None):
    """Execute the request.

    Same behavior as HttpRequest.execute(), but the response is
    mocked and not really from an HTTP request/response.
    """
    return self.postproc(self.resp, self.content)


class RequestMockBuilder(object):
  """A simple mock of HttpRequest

    Pass in a dictionary to the constructor that maps request methodIds to
    tuples of (httplib2.Response, content, opt_expected_body) that should be
    returned when that method is called. None may also be passed in for the
    httplib2.Response, in which case a 200 OK response will be generated.
    If an opt_expected_body (str or dict) is provided, it will be compared to
    the body and UnexpectedBodyError will be raised on inequality.

    Example:
      response = '{"data": {"id": "tag:google.c...'
      requestBuilder = RequestMockBuilder(
        {
          'plus.activities.get': (None, response),
        }
      )
      googleapiclient.discovery.build("plus", "v1", requestBuilder=requestBuilder)

    Methods that you do not supply a response for will return a
    200 OK with an empty string as the response content or raise an exception
    if check_unexpected is set to True. The methodId is taken from the rpcName
    in the discovery document.

    For more details see the project wiki.
  """

  def __init__(self, responses, check_unexpected=False):
    """Constructor for RequestMockBuilder

    The constructed object should be a callable object
    that can replace the class HttpResponse.

    responses - A dictionary that maps methodIds into tuples
                of (httplib2.Response, content). The methodId
                comes from the 'rpcName' field in the discovery
                document.
    check_unexpected - A boolean setting whether or not UnexpectedMethodError
                       should be raised on unsupplied method.
    """
    self.responses = responses
    self.check_unexpected = check_unexpected

  def __call__(self, http, postproc, uri, method='GET', body=None,
               headers=None, methodId=None, resumable=None):
    """Implements the callable interface that discovery.build() expects
    of requestBuilder, which is to build an object compatible with
    HttpRequest.execute(). See that method for the description of the
    parameters and the expected response.
    """
    if methodId in self.responses:
      response = self.responses[methodId]
      resp, content = response[:2]
      if len(response) > 2:
        # Test the body against the supplied expected_body.
        expected_body = response[2]
        if bool(expected_body) != bool(body):
          # Not expecting a body and provided one
          # or expecting a body and not provided one.
          raise UnexpectedBodyError(expected_body, body)
        if isinstance(expected_body, str):
          expected_body = json.loads(expected_body)
        body = json.loads(body)
        if body != expected_body:
          raise UnexpectedBodyError(expected_body, body)
      return HttpRequestMock(resp, content, postproc)
    elif self.check_unexpected:
      raise UnexpectedMethodError(methodId=methodId)
    else:
      model = JsonModel(False)
      return HttpRequestMock(None, '{}', model.response)


class HttpMock(object):
  """Mock of httplib2.Http"""

  def __init__(self, filename=None, headers=None):
    """
    Args:
      filename: string, absolute filename to read response from
      headers: dict, header to return with response
    """
    if headers is None:
      headers = {'status': '200'}
    if filename:
      f = open(filename, 'rb')
      self.data = f.read()
      f.close()
    else:
      self.data = None
    self.response_headers = headers
    self.headers = None
    self.uri = None
    self.method = None
    self.body = None
    self.headers = None


  def request(self, uri,
              method='GET',
              body=None,
              headers=None,
              redirections=1,
              connection_type=None):
    self.uri = uri
    self.method = method
    self.body = body
    self.headers = headers
    return httplib2.Response(self.response_headers), self.data
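

# Illustrative sketch (documentation only; assumes 'farm_response.json' is a
# canned response body saved on disk): HttpMock stands in for httplib2.Http
# and replays that body for any request made against it.
def _example_http_mock():
  http = HttpMock('farm_response.json', {'status': '200'})
  resp, content = http.request('http://example.com/farm/v2/animals')
  assert resp.status == 200
  assert content == http.data

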
class HttpMockSequence(object):
  """Mock of httplib2.Http

  Mocks a sequence of calls to request returning different responses for each
  call. Create an instance initialized with the desired response headers
  and content and then use as if an httplib2.Http instance.

    http = HttpMockSequence([
      ({'status': '401'}, ''),
      ({'status': '200'}, '{"access_token":"1/3w","expires_in":3600}'),
      ({'status': '200'}, 'echo_request_headers'),
      ])
    resp, content = http.request("http://examples.com")

  There are special values you can pass in for content to trigger
  behaviours that are helpful in testing.

  'echo_request_headers' means return the request headers in the response body
  'echo_request_headers_as_json' means return the request headers in
     the response body
  'echo_request_body' means return the request body in the response body
  'echo_request_uri' means return the request uri in the response body
  """

  def __init__(self, iterable):
    """
    Args:
      iterable: iterable, a sequence of pairs of (headers, body)
    """
    self._iterable = iterable
    self.follow_redirects = True

  def request(self, uri,
              method='GET',
              body=None,
              headers=None,
              redirections=1,
              connection_type=None):
    resp, content = self._iterable.pop(0)
    if content == 'echo_request_headers':
      content = headers
    elif content == 'echo_request_headers_as_json':
      content = json.dumps(headers)
    elif content == 'echo_request_body':
      if hasattr(body, 'read'):
        content = body.read()
      else:
        content = body
    elif content == 'echo_request_uri':
      content = uri
    if isinstance(content, six.text_type):
      content = content.encode('utf-8')
    return httplib2.Response(resp), content


def set_user_agent(http, user_agent):
  """Set the user-agent on every request.

  Args:
     http - An instance of httplib2.Http
         or something that acts like it.
     user_agent: string, the value for the user-agent header.

  Returns:
     A modified instance of http that was passed in.

  Example:

    h = httplib2.Http()
    h = set_user_agent(h, "my-app-name/6.0")

  Most of the time the user-agent will be set doing auth, this is for the rare
  cases where you are accessing an unauthenticated endpoint.
  """
  request_orig = http.request

  # The closure that will replace 'httplib2.Http.request'.
  def new_request(uri, method='GET', body=None, headers=None,
                  redirections=httplib2.DEFAULT_MAX_REDIRECTS,
                  connection_type=None):
    """Modify the request headers to add the user-agent."""
    if headers is None:
      headers = {}
    if 'user-agent' in headers:
      headers['user-agent'] = user_agent + ' ' + headers['user-agent']
    else:
      headers['user-agent'] = user_agent
    resp, content = request_orig(uri, method, body, headers,
                                 redirections, connection_type)
    return resp, content

  http.request = new_request
  return http
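

# Illustrative sketch (documentation only): verifying the user-agent wrapper
# with the 'echo_request_headers' helper from HttpMockSequence above. The URI
# is a placeholder.
def _example_set_user_agent():
  http = set_user_agent(
      HttpMockSequence([({'status': '200'}, 'echo_request_headers')]),
      'my-app-name/6.0')
  resp, content = http.request('http://example.com')
  assert content['user-agent'] == 'my-app-name/6.0'

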
def tunnel_patch(http):
  """Tunnel PATCH requests over POST.
  Args:
     http - An instance of httplib2.Http
         or something that acts like it.

  Returns:
     A modified instance of http that was passed in.

  Example:

    h = httplib2.Http()
    h = tunnel_patch(h)

  Useful if you are running on a platform that doesn't support PATCH.
  Apply this last if you are using OAuth 1.0, as changing the method
  will result in a different signature.
  """
  request_orig = http.request

  # The closure that will replace 'httplib2.Http.request'.
  def new_request(uri, method='GET', body=None, headers=None,
                  redirections=httplib2.DEFAULT_MAX_REDIRECTS,
                  connection_type=None):
    """Modify the request to tunnel a PATCH request over POST."""
    if headers is None:
      headers = {}
    if method == 'PATCH':
      if 'oauth_token' in headers.get('authorization', ''):
        LOGGER.warning(
            'OAuth 1.0 request made with Credentials after tunnel_patch.')
      headers['x-http-method-override'] = "PATCH"
      method = 'POST'
    resp, content = request_orig(uri, method, body, headers,
                                 redirections, connection_type)
    return resp, content

  http.request = new_request
  return http