# Copyright 2014 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""Classes to encapsulate a single HTTP request.

The classes implement a command pattern, with every
object supporting an execute() method that does the
actual HTTP request.
"""
from __future__ import absolute_import
import six
from six.moves import range

__author__ = 'jcgregorio@google.com (Joe Gregorio)'

from six import BytesIO, StringIO
from six.moves.urllib.parse import urlparse, urlunparse, quote, unquote

import base64
import copy
import gzip
import httplib2
import json
import logging
import mimetypes
import os
import random
import sys
import time
import uuid

from email.generator import Generator
from email.mime.multipart import MIMEMultipart
from email.mime.nonmultipart import MIMENonMultipart
from email.parser import FeedParser

from googleapiclient import mimeparse
from googleapiclient.errors import BatchError
from googleapiclient.errors import HttpError
from googleapiclient.errors import InvalidChunkSizeError
from googleapiclient.errors import ResumableUploadError
from googleapiclient.errors import UnexpectedBodyError
from googleapiclient.errors import UnexpectedMethodError
from googleapiclient.model import JsonModel
from oauth2client import util
58
59DEFAULT_CHUNK_SIZE = 512*1024
60
61MAX_URI_LENGTH = 2048
62
63
64class MediaUploadProgress(object):
65 """Status of a resumable upload."""
66
67 def __init__(self, resumable_progress, total_size):
68 """Constructor.
69
70 Args:
71 resumable_progress: int, bytes sent so far.
72 total_size: int, total bytes in complete upload, or None if the total
73 upload size isn't known ahead of time.
74 """
75 self.resumable_progress = resumable_progress
76 self.total_size = total_size
77
78 def progress(self):
79 """Percent of upload completed, as a float.
80
81 Returns:
82 the percentage complete as a float, returning 0.0 if the total size of
83 the upload is unknown.
84 """
85 if self.total_size is not None:
86 return float(self.resumable_progress) / float(self.total_size)
87 else:
88 return 0.0
89
90
91class MediaDownloadProgress(object):
92 """Status of a resumable download."""
93
94 def __init__(self, resumable_progress, total_size):
95 """Constructor.
96
97 Args:
98 resumable_progress: int, bytes received so far.
99 total_size: int, total bytes in complete download.
100 """
101 self.resumable_progress = resumable_progress
102 self.total_size = total_size
103
104 def progress(self):
105 """Percent of download completed, as a float.
106
107 Returns:
108 the percentage complete as a float, returning 0.0 if the total size of
109 the download is unknown.
110 """
111 if self.total_size is not None:
112 return float(self.resumable_progress) / float(self.total_size)
113 else:
114 return 0.0
115
116
117class MediaUpload(object):
118 """Describes a media object to upload.
119
120 Base class that defines the interface of MediaUpload subclasses.
121
122 Note that subclasses of MediaUpload may allow you to control the chunksize
123 when uploading a media object. It is important to keep the size of the chunk
124 as large as possible to keep the upload efficient. Other factors may influence
125 the size of the chunk you use, particularly if you are working in an
126 environment where individual HTTP requests may have a hardcoded time limit,
127 such as under certain classes of requests under Google App Engine.
128
  Streams are io.Base compatible objects that support seek(). Some MediaUpload
  subclasses support using streams directly to upload data, which is indicated
  by has_stream() returning True. In that case the stream() method should
  return an io.Base object that supports seek(). On platforms where the
  underlying httplib module supports streaming, for example Python 2.6 and
  later, the stream will be passed into the http library, which results in
  less memory being used and possibly faster uploads.

  If you need to upload media that can't be uploaded using any of the existing
  MediaUpload sub-classes then you can sub-class MediaUpload for your
  particular needs (see the illustrative sketch following this class).
  """
143
144 def chunksize(self):
145 """Chunk size for resumable uploads.
146
147 Returns:
148 Chunk size in bytes.
149 """
150 raise NotImplementedError()
151
152 def mimetype(self):
153 """Mime type of the body.
154
155 Returns:
156 Mime type.
157 """
158 return 'application/octet-stream'
159
160 def size(self):
161 """Size of upload.
162
163 Returns:
      Size of the body, or None if the size is unknown.
165 """
166 return None
167
168 def resumable(self):
169 """Whether this upload is resumable.
170
171 Returns:
172 True if resumable upload or False.
173 """
174 return False
175
  def getbytes(self, begin, length):
177 """Get bytes from the media.
178
179 Args:
180 begin: int, offset from beginning of file.
181 length: int, number of bytes to read, starting at begin.
182
183 Returns:
184 A string of bytes read. May be shorter than length if EOF was reached
185 first.
186 """
187 raise NotImplementedError()
188
189 def has_stream(self):
190 """Does the underlying upload support a streaming interface.
191
192 Streaming means it is an io.IOBase subclass that supports seek, i.e.
193 seekable() returns True.
194
195 Returns:
196 True if the call to stream() will return an instance of a seekable io.Base
197 subclass.
198 """
199 return False
200
201 def stream(self):
202 """A stream interface to the data being uploaded.
203
204 Returns:
205 The returned value is an io.IOBase subclass that supports seek, i.e.
206 seekable() returns True.
207 """
208 raise NotImplementedError()
209
210 @util.positional(1)
211 def _to_json(self, strip=None):
212 """Utility function for creating a JSON representation of a MediaUpload.
213
214 Args:
215 strip: array, An array of names of members to not include in the JSON.
216
217 Returns:
218 string, a JSON representation of this instance, suitable to pass to
219 from_json().
220 """
221 t = type(self)
222 d = copy.copy(self.__dict__)
223 if strip is not None:
224 for member in strip:
225 del d[member]
226 d['_class'] = t.__name__
227 d['_module'] = t.__module__
    return json.dumps(d)

230 def to_json(self):
231 """Create a JSON representation of an instance of MediaUpload.
232
233 Returns:
234 string, a JSON representation of this instance, suitable to pass to
235 from_json().
236 """
237 return self._to_json()
238
239 @classmethod
240 def new_from_json(cls, s):
241 """Utility class method to instantiate a MediaUpload subclass from a JSON
242 representation produced by to_json().
243
244 Args:
245 s: string, JSON from to_json().
246
247 Returns:
248 An instance of the subclass of MediaUpload that was serialized with
249 to_json().
250 """
    data = json.loads(s)
    # Find and call the right classmethod from_json() to restore the object.
253 module = data['_module']
254 m = __import__(module, fromlist=module.split('.')[:-1])
255 kls = getattr(m, data['_class'])
256 from_json = getattr(kls, 'from_json')
257 return from_json(s)
258
259
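# Illustrative sketch only (not part of the library API): the MediaUpload
# docstring above notes that you can sub-class MediaUpload directly when none
# of the provided sub-classes fit. The hypothetical class below shows the
# minimal surface a simple non-resumable sub-class needs to implement; in
# practice MediaIoBaseUpload already covers this case.
class _ExampleBytesUpload(MediaUpload):
  """Hypothetical MediaUpload backed by an in-memory byte string."""

  def __init__(self, data, mimetype='application/octet-stream'):
    super(_ExampleBytesUpload, self).__init__()
    self._data = data
    self._mimetype = mimetype

  def chunksize(self):
    return DEFAULT_CHUNK_SIZE

  def mimetype(self):
    return self._mimetype

  def size(self):
    return len(self._data)

  def getbytes(self, begin, length):
    return self._data[begin:begin + length]

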
260class MediaIoBaseUpload(MediaUpload):
  """A MediaUpload for io.Base objects.

  Note that the Python file object is compatible with io.Base and can be used
  with this class also.

    fh = BytesIO('...Some data to upload...')
    media = MediaIoBaseUpload(fh, mimetype='image/png',
      chunksize=1024*1024, resumable=True)
269 farm.animals().insert(
270 id='cow',
271 name='cow.png',
272 media_body=media).execute()
273
274 Depending on the platform you are working on, you may pass -1 as the
275 chunksize, which indicates that the entire file should be uploaded in a single
276 request. If the underlying platform supports streams, such as Python 2.6 or
277 later, then this can be very efficient as it avoids multiple connections, and
278 also avoids loading the entire file into memory before sending it. Note that
279 Google App Engine has a 5MB limit on request size, so you should never set
280 your chunksize larger than 5MB, or to -1.
281 """
282
283 @util.positional(3)
284 def __init__(self, fd, mimetype, chunksize=DEFAULT_CHUNK_SIZE,
285 resumable=False):
286 """Constructor.
287
288 Args:
289 fd: io.Base or file object, The source of the bytes to upload. MUST be
290 opened in blocking mode, do not use streams opened in non-blocking mode.
291 The given stream must be seekable, that is, it must be able to call
292 seek() on fd.
293 mimetype: string, Mime-type of the file.
294 chunksize: int, File will be uploaded in chunks of this many bytes. Only
295 used if resumable=True. Pass in a value of -1 if the file is to be
296 uploaded as a single chunk. Note that Google App Engine has a 5MB limit
297 on request size, so you should never set your chunksize larger than 5MB,
298 or to -1.
299 resumable: bool, True if this is a resumable upload. False means upload
300 in a single request.
301 """
302 super(MediaIoBaseUpload, self).__init__()
303 self._fd = fd
304 self._mimetype = mimetype
305 if not (chunksize == -1 or chunksize > 0):
306 raise InvalidChunkSizeError()
307 self._chunksize = chunksize
308 self._resumable = resumable
309
310 self._fd.seek(0, os.SEEK_END)
311 self._size = self._fd.tell()
312
313 def chunksize(self):
314 """Chunk size for resumable uploads.
315
316 Returns:
317 Chunk size in bytes.
318 """
319 return self._chunksize
320
321 def mimetype(self):
322 """Mime type of the body.
323
324 Returns:
325 Mime type.
326 """
327 return self._mimetype
328
329 def size(self):
330 """Size of upload.
331
332 Returns:
      Size of the body, or None if the size is unknown.
334 """
335 return self._size
336
337 def resumable(self):
338 """Whether this upload is resumable.
339
340 Returns:
341 True if resumable upload or False.
342 """
343 return self._resumable
344
345 def getbytes(self, begin, length):
346 """Get bytes from the media.
347
348 Args:
349 begin: int, offset from beginning of file.
350 length: int, number of bytes to read, starting at begin.
351
352 Returns:
      A string of bytes read. May be shorter than length if EOF was reached
354 first.
355 """
356 self._fd.seek(begin)
357 return self._fd.read(length)
358
359 def has_stream(self):
360 """Does the underlying upload support a streaming interface.
361
362 Streaming means it is an io.IOBase subclass that supports seek, i.e.
363 seekable() returns True.
364
365 Returns:
366 True if the call to stream() will return an instance of a seekable io.Base
367 subclass.
368 """
369 return True
370
371 def stream(self):
372 """A stream interface to the data being uploaded.
373
374 Returns:
375 The returned value is an io.IOBase subclass that supports seek, i.e.
376 seekable() returns True.
377 """
378 return self._fd
379
380 def to_json(self):
381 """This upload type is not serializable."""
382 raise NotImplementedError('MediaIoBaseUpload is not serializable.')
383
384
385class MediaFileUpload(MediaIoBaseUpload):
386 """A MediaUpload for a file.
387
388 Construct a MediaFileUpload and pass as the media_body parameter of the
389 method. For example, if we had a service that allowed uploading images:
390
391
392 media = MediaFileUpload('cow.png', mimetype='image/png',
393 chunksize=1024*1024, resumable=True)
394 farm.animals().insert(
395 id='cow',
396 name='cow.png',
397 media_body=media).execute()
398
399 Depending on the platform you are working on, you may pass -1 as the
400 chunksize, which indicates that the entire file should be uploaded in a single
401 request. If the underlying platform supports streams, such as Python 2.6 or
402 later, then this can be very efficient as it avoids multiple connections, and
403 also avoids loading the entire file into memory before sending it. Note that
404 Google App Engine has a 5MB limit on request size, so you should never set
405 your chunksize larger than 5MB, or to -1.
406 """
407
408 @util.positional(2)
409 def __init__(self, filename, mimetype=None, chunksize=DEFAULT_CHUNK_SIZE,
410 resumable=False):
411 """Constructor.
412
413 Args:
414 filename: string, Name of the file.
415 mimetype: string, Mime-type of the file. If None then a mime-type will be
416 guessed from the file extension.
417 chunksize: int, File will be uploaded in chunks of this many bytes. Only
418 used if resumable=True. Pass in a value of -1 if the file is to be
419 uploaded in a single chunk. Note that Google App Engine has a 5MB limit
420 on request size, so you should never set your chunksize larger than 5MB,
421 or to -1.
422 resumable: bool, True if this is a resumable upload. False means upload
423 in a single request.
424 """
425 self._filename = filename
426 fd = open(self._filename, 'rb')
427 if mimetype is None:
428 (mimetype, encoding) = mimetypes.guess_type(filename)
429 super(MediaFileUpload, self).__init__(fd, mimetype, chunksize=chunksize,
430 resumable=resumable)
431
432 def to_json(self):
433 """Creating a JSON representation of an instance of MediaFileUpload.
434
435 Returns:
436 string, a JSON representation of this instance, suitable to pass to
437 from_json().
438 """
439 return self._to_json(strip=['_fd'])
440
441 @staticmethod
442 def from_json(s):
    d = json.loads(s)
    return MediaFileUpload(d['_filename'], mimetype=d['_mimetype'],
445 chunksize=d['_chunksize'], resumable=d['_resumable'])
446
447
448class MediaInMemoryUpload(MediaIoBaseUpload):
449 """MediaUpload for a chunk of bytes.
450
451 DEPRECATED: Use MediaIoBaseUpload with either io.TextIOBase or StringIO for
452 the stream.
453 """
454
455 @util.positional(2)
456 def __init__(self, body, mimetype='application/octet-stream',
457 chunksize=DEFAULT_CHUNK_SIZE, resumable=False):
458 """Create a new MediaInMemoryUpload.
459
460 DEPRECATED: Use MediaIoBaseUpload with either io.TextIOBase or StringIO for
461 the stream.
462
463 Args:
464 body: string, Bytes of body content.
465 mimetype: string, Mime-type of the file or default of
466 'application/octet-stream'.
467 chunksize: int, File will be uploaded in chunks of this many bytes. Only
468 used if resumable=True.
469 resumable: bool, True if this is a resumable upload. False means upload
470 in a single request.
471 """
    fd = BytesIO(body)
    super(MediaInMemoryUpload, self).__init__(fd, mimetype, chunksize=chunksize,
474 resumable=resumable)
475
476
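# Migration sketch (illustrative only, not part of the library): since
# MediaInMemoryUpload is deprecated, equivalent behaviour can be obtained by
# wrapping the bytes in a BytesIO and handing it to MediaIoBaseUpload. The
# helper name below is hypothetical.
def _example_in_memory_upload(body):
  """Hypothetical helper showing the suggested replacement."""
  fd = BytesIO(body)
  return MediaIoBaseUpload(fd, mimetype='application/octet-stream',
                           chunksize=DEFAULT_CHUNK_SIZE, resumable=False)

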
477class MediaIoBaseDownload(object):
  """Download media resources.
479
480 Note that the Python file object is compatible with io.Base and can be used
481 with this class also.
482
483
484 Example:
485 request = farms.animals().get_media(id='cow')
486 fh = io.FileIO('cow.png', mode='wb')
487 downloader = MediaIoBaseDownload(fh, request, chunksize=1024*1024)
488
489 done = False
490 while done is False:
491 status, done = downloader.next_chunk()
492 if status:
493 print "Download %d%%." % int(status.progress() * 100)
494 print "Download Complete!"
495 """
496
497 @util.positional(3)
498 def __init__(self, fd, request, chunksize=DEFAULT_CHUNK_SIZE):
499 """Constructor.
500
501 Args:
502 fd: io.Base or file object, The stream in which to write the downloaded
503 bytes.
504 request: googleapiclient.http.HttpRequest, the media request to perform in
505 chunks.
506 chunksize: int, File will be downloaded in chunks of this many bytes.
507 """
508 self._fd = fd
509 self._request = request
510 self._uri = request.uri
511 self._chunksize = chunksize
512 self._progress = 0
513 self._total_size = None
514 self._done = False
515
516 # Stubs for testing.
517 self._sleep = time.sleep
518 self._rand = random.random
519
520 @util.positional(1)
521 def next_chunk(self, num_retries=0):
522 """Get the next chunk of the download.
523
524 Args:
525 num_retries: Integer, number of times to retry 500's with randomized
526 exponential backoff. If all retries fail, the raised HttpError
527 represents the last request. If zero (default), we attempt the
528 request only once.
529
530 Returns:
531 (status, done): (MediaDownloadStatus, boolean)
532 The value of 'done' will be True when the media has been fully
533 downloaded.
534
535 Raises:
536 googleapiclient.errors.HttpError if the response was not a 2xx.
      httplib2.HttpLib2Error if a transport error has occurred.
538 """
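    # Request the next chunk. Note that the HTTP Range header is inclusive on
    # both ends, so this asks for chunksize + 1 bytes; the amount actually
    # written is whatever the server returns.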
539 headers = {
540 'range': 'bytes=%d-%d' % (
541 self._progress, self._progress + self._chunksize)
542 }
543 http = self._request.http
544
    for retry_num in range(num_retries + 1):
      if retry_num > 0:
547 self._sleep(self._rand() * 2**retry_num)
548 logging.warning(
549 'Retry #%d for media download: GET %s, following status: %d'
550 % (retry_num, self._uri, resp.status))
551
552 resp, content = http.request(self._uri, headers=headers)
553 if resp.status < 500:
554 break
555
556 if resp.status in [200, 206]:
557 if 'content-location' in resp and resp['content-location'] != self._uri:
558 self._uri = resp['content-location']
559 self._progress += len(content)
560 self._fd.write(content)
561
562 if 'content-range' in resp:
563 content_range = resp['content-range']
564 length = content_range.rsplit('/', 1)[1]
565 self._total_size = int(length)
      elif 'content-length' in resp:
        self._total_size = int(resp['content-length'])

569 if self._progress == self._total_size:
570 self._done = True
571 return MediaDownloadProgress(self._progress, self._total_size), self._done
572 else:
573 raise HttpError(resp, content, uri=self._uri)
574
575
576class _StreamSlice(object):
577 """Truncated stream.
578
579 Takes a stream and presents a stream that is a slice of the original stream.
580 This is used when uploading media in chunks. In later versions of Python a
581 stream can be passed to httplib in place of the string of data to send. The
582 problem is that httplib just blindly reads to the end of the stream. This
583 wrapper presents a virtual stream that only reads to the end of the chunk.
584 """
585
586 def __init__(self, stream, begin, chunksize):
587 """Constructor.
588
589 Args:
590 stream: (io.Base, file object), the stream to wrap.
591 begin: int, the seek position the chunk begins at.
592 chunksize: int, the size of the chunk.
593 """
594 self._stream = stream
595 self._begin = begin
596 self._chunksize = chunksize
597 self._stream.seek(begin)
598
599 def read(self, n=-1):
600 """Read n bytes.
601
602 Args:
603 n, int, the number of bytes to read.
604
605 Returns:
606 A string of length 'n', or less if EOF is reached.
607 """
608 # The data left available to read sits in [cur, end)
609 cur = self._stream.tell()
610 end = self._begin + self._chunksize
611 if n == -1 or cur + n > end:
612 n = end - cur
613 return self._stream.read(n)
614
615
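# Illustrative sketch only: how _StreamSlice limits reads to one chunk. The
# function name is hypothetical and exists purely as an example; it is not
# called anywhere in the library.
def _example_stream_slice():
  """Show that a _StreamSlice never reads past its chunk boundary."""
  stream = BytesIO(b'0123456789')
  # Wrap bytes 2..6 (a 5-byte chunk starting at offset 2).
  chunk = _StreamSlice(stream, 2, 5)
  first = chunk.read(3)   # b'234'
  rest = chunk.read()     # b'56' -- stops at the chunk end, not at EOF.
  return first, rest

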
616class HttpRequest(object):
617 """Encapsulates a single HTTP request."""
618
619 @util.positional(4)
620 def __init__(self, http, postproc, uri,
621 method='GET',
622 body=None,
623 headers=None,
624 methodId=None,
625 resumable=None):
626 """Constructor for an HttpRequest.
627
628 Args:
629 http: httplib2.Http, the transport object to use to make a request
630 postproc: callable, called on the HTTP response and content to transform
631 it into a data object before returning, or raising an exception
632 on an error.
633 uri: string, the absolute URI to send the request to
634 method: string, the HTTP method to use
635 body: string, the request body of the HTTP request,
636 headers: dict, the HTTP request headers
637 methodId: string, a unique identifier for the API method being called.
      resumable: MediaUpload, None if this is not a resumable request.
639 """
640 self.uri = uri
641 self.method = method
642 self.body = body
643 self.headers = headers or {}
644 self.methodId = methodId
645 self.http = http
646 self.postproc = postproc
647 self.resumable = resumable
648 self.response_callbacks = []
649 self._in_error_state = False
650
651 # Pull the multipart boundary out of the content-type header.
652 major, minor, params = mimeparse.parse_mime_type(
653 headers.get('content-type', 'application/json'))
654
655 # The size of the non-media part of the request.
656 self.body_size = len(self.body or '')
657
658 # The resumable URI to send chunks to.
659 self.resumable_uri = None
660
661 # The bytes that have been uploaded.
662 self.resumable_progress = 0
663
664 # Stubs for testing.
665 self._rand = random.random
666 self._sleep = time.sleep
667
668 @util.positional(1)
669 def execute(self, http=None, num_retries=0):
670 """Execute the request.
671
672 Args:
673 http: httplib2.Http, an http object to be used in place of the
674 one the HttpRequest request object was constructed with.
675 num_retries: Integer, number of times to retry 500's with randomized
676 exponential backoff. If all retries fail, the raised HttpError
677 represents the last request. If zero (default), we attempt the
678 request only once.
679
680 Returns:
681 A deserialized object model of the response body as determined
682 by the postproc.
683
684 Raises:
685 googleapiclient.errors.HttpError if the response was not a 2xx.
      httplib2.HttpLib2Error if a transport error has occurred.
687 """
688 if http is None:
689 http = self.http
690
691 if self.resumable:
692 body = None
693 while body is None:
694 _, body = self.next_chunk(http=http, num_retries=num_retries)
695 return body
696
697 # Non-resumable case.
698
699 if 'content-length' not in self.headers:
700 self.headers['content-length'] = str(self.body_size)
701 # If the request URI is too long then turn it into a POST request.
702 if len(self.uri) > MAX_URI_LENGTH and self.method == 'GET':
703 self.method = 'POST'
704 self.headers['x-http-method-override'] = 'GET'
705 self.headers['content-type'] = 'application/x-www-form-urlencoded'
      parsed = urlparse(self.uri)
      self.uri = urlunparse(
          (parsed.scheme, parsed.netloc, parsed.path, parsed.params, None,
709 None)
710 )
711 self.body = parsed.query
712 self.headers['content-length'] = str(len(self.body))
713
714 # Handle retries for server-side errors.
    for retry_num in range(num_retries + 1):
      if retry_num > 0:
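        # Randomized exponential backoff: sleep for a random fraction of
        # 2**retry_num seconds before re-issuing the request.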
717 self._sleep(self._rand() * 2**retry_num)
718 logging.warning('Retry #%d for request: %s %s, following status: %d'
719 % (retry_num, self.method, self.uri, resp.status))
720
721 resp, content = http.request(str(self.uri), method=str(self.method),
722 body=self.body, headers=self.headers)
723 if resp.status < 500:
724 break
725
726 for callback in self.response_callbacks:
727 callback(resp)
728 if resp.status >= 300:
729 raise HttpError(resp, content, uri=self.uri)
730 return self.postproc(resp, content)
731
732 @util.positional(2)
733 def add_response_callback(self, cb):
    """Add a callback to be called when the response headers are received.
735
736 Args:
737 cb: Callback to be called on receiving the response headers, of signature:
738
739 def cb(resp):
740 # Where resp is an instance of httplib2.Response
741 """
742 self.response_callbacks.append(cb)
743
744 @util.positional(1)
745 def next_chunk(self, http=None, num_retries=0):
746 """Execute the next step of a resumable upload.
747
748 Can only be used if the method being executed supports media uploads and
749 the MediaUpload object passed in was flagged as using resumable upload.
750
751 Example:
752
753 media = MediaFileUpload('cow.png', mimetype='image/png',
754 chunksize=1000, resumable=True)
755 request = farm.animals().insert(
756 id='cow',
757 name='cow.png',
758 media_body=media)
759
760 response = None
761 while response is None:
762 status, response = request.next_chunk()
763 if status:
764 print "Upload %d%% complete." % int(status.progress() * 100)
765
766
767 Args:
768 http: httplib2.Http, an http object to be used in place of the
769 one the HttpRequest request object was constructed with.
770 num_retries: Integer, number of times to retry 500's with randomized
771 exponential backoff. If all retries fail, the raised HttpError
772 represents the last request. If zero (default), we attempt the
773 request only once.
774
775 Returns:
776 (status, body): (ResumableMediaStatus, object)
777 The body will be None until the resumable media is fully uploaded.
778
779 Raises:
780 googleapiclient.errors.HttpError if the response was not a 2xx.
      httplib2.HttpLib2Error if a transport error has occurred.
782 """
783 if http is None:
784 http = self.http
785
786 if self.resumable.size() is None:
787 size = '*'
788 else:
789 size = str(self.resumable.size())
790
791 if self.resumable_uri is None:
792 start_headers = copy.copy(self.headers)
793 start_headers['X-Upload-Content-Type'] = self.resumable.mimetype()
794 if size != '*':
795 start_headers['X-Upload-Content-Length'] = size
796 start_headers['content-length'] = str(self.body_size)
797
      for retry_num in range(num_retries + 1):
        if retry_num > 0:
800 self._sleep(self._rand() * 2**retry_num)
801 logging.warning(
802 'Retry #%d for resumable URI request: %s %s, following status: %d'
803 % (retry_num, self.method, self.uri, resp.status))
804
805 resp, content = http.request(self.uri, method=self.method,
806 body=self.body,
807 headers=start_headers)
808 if resp.status < 500:
809 break
810
811 if resp.status == 200 and 'location' in resp:
812 self.resumable_uri = resp['location']
813 else:
814 raise ResumableUploadError(resp, content)
815 elif self._in_error_state:
816 # If we are in an error state then query the server for current state of
817 # the upload by sending an empty PUT and reading the 'range' header in
818 # the response.
819 headers = {
820 'Content-Range': 'bytes */%s' % size,
821 'content-length': '0'
822 }
823 resp, content = http.request(self.resumable_uri, 'PUT',
824 headers=headers)
825 status, body = self._process_response(resp, content)
826 if body:
827 # The upload was complete.
828 return (status, body)
829
830 # The httplib.request method can take streams for the body parameter, but
831 # only in Python 2.6 or later. If a stream is available under those
832 # conditions then use it as the body argument.
833 if self.resumable.has_stream() and sys.version_info[1] >= 6:
834 data = self.resumable.stream()
835 if self.resumable.chunksize() == -1:
836 data.seek(self.resumable_progress)
837 chunk_end = self.resumable.size() - self.resumable_progress - 1
838 else:
839 # Doing chunking with a stream, so wrap a slice of the stream.
840 data = _StreamSlice(data, self.resumable_progress,
841 self.resumable.chunksize())
842 chunk_end = min(
843 self.resumable_progress + self.resumable.chunksize() - 1,
844 self.resumable.size() - 1)
845 else:
846 data = self.resumable.getbytes(
847 self.resumable_progress, self.resumable.chunksize())
848
849 # A short read implies that we are at EOF, so finish the upload.
850 if len(data) < self.resumable.chunksize():
851 size = str(self.resumable_progress + len(data))
852
853 chunk_end = self.resumable_progress + len(data) - 1
854
855 headers = {
856 'Content-Range': 'bytes %d-%d/%s' % (
857 self.resumable_progress, chunk_end, size),
858 # Must set the content-length header here because httplib can't
859 # calculate the size when working with _StreamSlice.
860 'Content-Length': str(chunk_end - self.resumable_progress + 1)
861 }
862
    for retry_num in range(num_retries + 1):
      if retry_num > 0:
865 self._sleep(self._rand() * 2**retry_num)
866 logging.warning(
867 'Retry #%d for media upload: %s %s, following status: %d'
868 % (retry_num, self.method, self.uri, resp.status))
869
870 try:
871 resp, content = http.request(self.resumable_uri, method='PUT',
872 body=data,
873 headers=headers)
874 except:
875 self._in_error_state = True
876 raise
877 if resp.status < 500:
878 break
879
880 return self._process_response(resp, content)
881
882 def _process_response(self, resp, content):
883 """Process the response from a single chunk upload.
884
885 Args:
886 resp: httplib2.Response, the response object.
887 content: string, the content of the response.
888
889 Returns:
890 (status, body): (ResumableMediaStatus, object)
891 The body will be None until the resumable media is fully uploaded.
892
893 Raises:
894 googleapiclient.errors.HttpError if the response was not a 2xx or a 308.
895 """
896 if resp.status in [200, 201]:
897 self._in_error_state = False
898 return None, self.postproc(resp, content)
899 elif resp.status == 308:
900 self._in_error_state = False
901 # A "308 Resume Incomplete" indicates we are not done.
902 self.resumable_progress = int(resp['range'].split('-')[1]) + 1
903 if 'location' in resp:
904 self.resumable_uri = resp['location']
905 else:
906 self._in_error_state = True
907 raise HttpError(resp, content, uri=self.uri)
908
909 return (MediaUploadProgress(self.resumable_progress, self.resumable.size()),
910 None)
911
912 def to_json(self):
913 """Returns a JSON representation of the HttpRequest."""
914 d = copy.copy(self.__dict__)
915 if d['resumable'] is not None:
916 d['resumable'] = self.resumable.to_json()
917 del d['http']
918 del d['postproc']
919 del d['_sleep']
920 del d['_rand']
921
    return json.dumps(d)

924 @staticmethod
925 def from_json(s, http, postproc):
926 """Returns an HttpRequest populated with info from a JSON object."""
    d = json.loads(s)
    if d['resumable'] is not None:
929 d['resumable'] = MediaUpload.new_from_json(d['resumable'])
930 return HttpRequest(
931 http,
932 postproc,
933 uri=d['uri'],
934 method=d['method'],
935 body=d['body'],
936 headers=d['headers'],
937 methodId=d['methodId'],
938 resumable=d['resumable'])
939
940
941class BatchHttpRequest(object):
942 """Batches multiple HttpRequest objects into a single HTTP request.
943
944 Example:
945 from googleapiclient.http import BatchHttpRequest
946
947 def list_animals(request_id, response, exception):
948 \"\"\"Do something with the animals list response.\"\"\"
949 if exception is not None:
950 # Do something with the exception.
951 pass
952 else:
953 # Do something with the response.
954 pass
955
956 def list_farmers(request_id, response, exception):
957 \"\"\"Do something with the farmers list response.\"\"\"
958 if exception is not None:
959 # Do something with the exception.
960 pass
961 else:
962 # Do something with the response.
963 pass
964
965 service = build('farm', 'v2')
966
967 batch = BatchHttpRequest()
968
969 batch.add(service.animals().list(), list_animals)
970 batch.add(service.farmers().list(), list_farmers)
971 batch.execute(http=http)
972 """
973
974 @util.positional(1)
975 def __init__(self, callback=None, batch_uri=None):
976 """Constructor for a BatchHttpRequest.
977
978 Args:
979 callback: callable, A callback to be called for each response, of the
980 form callback(id, response, exception). The first parameter is the
981 request id, and the second is the deserialized response object. The
982 third is an googleapiclient.errors.HttpError exception object if an HTTP error
983 occurred while processing the request, or None if no error occurred.
984 batch_uri: string, URI to send batch requests to.
985 """
986 if batch_uri is None:
987 batch_uri = 'https://www.googleapis.com/batch'
988 self._batch_uri = batch_uri
989
990 # Global callback to be called for each individual response in the batch.
991 self._callback = callback
992
993 # A map from id to request.
994 self._requests = {}
995
996 # A map from id to callback.
997 self._callbacks = {}
998
999 # List of request ids, in the order in which they were added.
1000 self._order = []
1001
1002 # The last auto generated id.
1003 self._last_auto_id = 0
1004
1005 # Unique ID on which to base the Content-ID headers.
1006 self._base_id = None
1007
1008 # A map from request id to (httplib2.Response, content) response pairs
1009 self._responses = {}
1010
1011 # A map of id(Credentials) that have been refreshed.
1012 self._refreshed_credentials = {}
1013
1014 def _refresh_and_apply_credentials(self, request, http):
1015 """Refresh the credentials and apply to the request.
1016
1017 Args:
1018 request: HttpRequest, the request.
1019 http: httplib2.Http, the global http object for the batch.
1020 """
    # Force the credentials to refresh, but only once per refresh_token.
    # If there is no http per the request then refresh the http passed in
    # via execute().
1024 creds = None
1025 if request.http is not None and hasattr(request.http.request,
1026 'credentials'):
1027 creds = request.http.request.credentials
1028 elif http is not None and hasattr(http.request, 'credentials'):
1029 creds = http.request.credentials
1030 if creds is not None:
1031 if id(creds) not in self._refreshed_credentials:
1032 creds.refresh(http)
1033 self._refreshed_credentials[id(creds)] = 1
1034
1035 # Only apply the credentials if we are using the http object passed in,
1036 # otherwise apply() will get called during _serialize_request().
1037 if request.http is None or not hasattr(request.http.request,
1038 'credentials'):
1039 creds.apply(request.headers)
1040
1041 def _id_to_header(self, id_):
1042 """Convert an id to a Content-ID header value.
1043
1044 Args:
1045 id_: string, identifier of individual request.
1046
1047 Returns:
1048 A Content-ID header with the id_ encoded into it. A UUID is prepended to
1049 the value because Content-ID headers are supposed to be universally
1050 unique.
1051 """
1052 if self._base_id is None:
1053 self._base_id = uuid.uuid4()
1054
    return '<%s+%s>' % (self._base_id, quote(id_))

1057 def _header_to_id(self, header):
1058 """Convert a Content-ID header value to an id.
1059
1060 Presumes the Content-ID header conforms to the format that _id_to_header()
1061 returns.
1062
1063 Args:
1064 header: string, Content-ID header value.
1065
1066 Returns:
1067 The extracted id value.
1068
1069 Raises:
1070 BatchError if the header is not in the expected format.
1071 """
1072 if header[0] != '<' or header[-1] != '>':
1073 raise BatchError("Invalid value for Content-ID: %s" % header)
1074 if '+' not in header:
1075 raise BatchError("Invalid value for Content-ID: %s" % header)
1076 base, id_ = header[1:-1].rsplit('+', 1)
1077
    return unquote(id_)

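  # Round-trip example (illustrative): _id_to_header('1') returns a value of
  # the form '<UUID+1>', where UUID is generated once per batch, and
  # _header_to_id() recovers '1' from that header.
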
1080 def _serialize_request(self, request):
1081 """Convert an HttpRequest object into a string.
1082
1083 Args:
1084 request: HttpRequest, the request to serialize.
1085
1086 Returns:
1087 The request as a string in application/http format.
1088 """
1089 # Construct status line
    parsed = urlparse(request.uri)
    request_line = urlunparse(
        (None, None, parsed.path, parsed.params, parsed.query, None)
1093 )
1094 status_line = request.method + ' ' + request_line + ' HTTP/1.1\n'
1095 major, minor = request.headers.get('content-type', 'application/json').split('/')
1096 msg = MIMENonMultipart(major, minor)
1097 headers = request.headers.copy()
1098
1099 if request.http is not None and hasattr(request.http.request,
1100 'credentials'):
1101 request.http.request.credentials.apply(headers)
1102
1103 # MIMENonMultipart adds its own Content-Type header.
1104 if 'content-type' in headers:
1105 del headers['content-type']
1106
    for key, value in six.iteritems(headers):
      msg[key] = value
1109 msg['Host'] = parsed.netloc
1110 msg.set_unixfrom(None)
1111
1112 if request.body is not None:
1113 msg.set_payload(request.body)
1114 msg['content-length'] = str(len(request.body))
1115
1116 # Serialize the mime message.
    fp = StringIO()
    # maxheaderlen=0 means don't line wrap headers.
1119 g = Generator(fp, maxheaderlen=0)
1120 g.flatten(msg, unixfrom=False)
1121 body = fp.getvalue()
1122
1123 # Strip off the \n\n that the MIME lib tacks onto the end of the payload.
1124 if request.body is None:
1125 body = body[:-2]
1126
1127 return status_line.encode('utf-8') + body
1128
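  # For illustration only (not authoritative output), a serialized
  # sub-request produced by _serialize_request() looks roughly like:
  #
  #   GET /farm/v2/animals?alt=json HTTP/1.1
  #   Content-Type: application/json
  #   Host: www.googleapis.com
  #   content-length: 2
  #
  #   {}
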
1129 def _deserialize_response(self, payload):
1130 """Convert string into httplib2 response and content.
1131
1132 Args:
1133 payload: string, headers and body as a string.
1134
1135 Returns:
1136 A pair (resp, content), such as would be returned from httplib2.request.
1137 """
1138 # Strip off the status line
1139 status_line, payload = payload.split('\n', 1)
1140 protocol, status, reason = status_line.split(' ', 2)
1141
1142 # Parse the rest of the response
1143 parser = FeedParser()
1144 parser.feed(payload)
1145 msg = parser.close()
1146 msg['status'] = status
1147
1148 # Create httplib2.Response from the parsed headers.
1149 resp = httplib2.Response(msg)
1150 resp.reason = reason
1151 resp.version = int(protocol.split('/', 1)[1].replace('.', ''))
1152
1153 content = payload.split('\r\n\r\n', 1)[1]
1154
1155 return resp, content
1156
1157 def _new_id(self):
1158 """Create a new id.
1159
1160 Auto incrementing number that avoids conflicts with ids already used.
1161
1162 Returns:
1163 string, a new unique id.
1164 """
1165 self._last_auto_id += 1
1166 while str(self._last_auto_id) in self._requests:
1167 self._last_auto_id += 1
1168 return str(self._last_auto_id)
1169
1170 @util.positional(2)
1171 def add(self, request, callback=None, request_id=None):
1172 """Add a new request.
1173
    Every callback added will be paired with a unique id, the request_id. That
    unique id will be passed back to the callback when the response comes back
    from the server. The default behavior is to have the library generate its
    own unique id. If the caller passes in a request_id then they must ensure
    uniqueness for each request_id; if a request_id is not unique an exception
    is raised. Callers should either supply all request_ids or never supply a
    request id, to avoid such an error.
1181
1182 Args:
1183 request: HttpRequest, Request to add to the batch.
1184 callback: callable, A callback to be called for this response, of the
1185 form callback(id, response, exception). The first parameter is the
1186 request id, and the second is the deserialized response object. The
1187 third is an googleapiclient.errors.HttpError exception object if an HTTP error
1188 occurred while processing the request, or None if no errors occurred.
1189 request_id: string, A unique id for the request. The id will be passed to
1190 the callback with the response.
1191
1192 Returns:
1193 None
1194
1195 Raises:
1196 BatchError if a media request is added to a batch.
      KeyError if the request_id is not unique.
1198 """
1199 if request_id is None:
1200 request_id = self._new_id()
1201 if request.resumable is not None:
1202 raise BatchError("Media requests cannot be used in a batch request.")
1203 if request_id in self._requests:
1204 raise KeyError("A request with this ID already exists: %s" % request_id)
1205 self._requests[request_id] = request
1206 self._callbacks[request_id] = callback
1207 self._order.append(request_id)
1208
1209 def _execute(self, http, order, requests):
1210 """Serialize batch request, send to server, process response.
1211
1212 Args:
1213 http: httplib2.Http, an http object to be used to make the request with.
1214 order: list, list of request ids in the order they were added to the
1215 batch.
      requests: list, list of request objects to send.
1217
1218 Raises:
      httplib2.HttpLib2Error if a transport error has occurred.
1220 googleapiclient.errors.BatchError if the response is the wrong format.
1221 """
1222 message = MIMEMultipart('mixed')
    # Message should not write out its own headers.
1224 setattr(message, '_write_headers', lambda self: None)
1225
1226 # Add all the individual requests.
1227 for request_id in order:
1228 request = requests[request_id]
1229
1230 msg = MIMENonMultipart('application', 'http')
1231 msg['Content-Transfer-Encoding'] = 'binary'
1232 msg['Content-ID'] = self._id_to_header(request_id)
1233
1234 body = self._serialize_request(request)
1235 msg.set_payload(body)
1236 message.attach(msg)
1237
    # encode the body: note that we can't use `as_string`, because
    # it plays games with `From ` lines.
    fp = StringIO()
    g = Generator(fp, mangle_from_=False)
    g.flatten(message, unixfrom=False)
    body = fp.getvalue()

1245 headers = {}
1246 headers['content-type'] = ('multipart/mixed; '
1247 'boundary="%s"') % message.get_boundary()
1248
1249 resp, content = http.request(self._batch_uri, method='POST', body=body,
1250 headers=headers)
1251
1252 if resp.status >= 300:
1253 raise HttpError(resp, content, uri=self._batch_uri)
1254
1255 # Now break out the individual responses and store each one.
1256 boundary, _ = content.split(None, 1)
1257
1258 # Prepend with a content-type header so FeedParser can handle it.
1259 header = 'content-type: %s\r\n\r\n' % resp['content-type']
1260 for_parser = header + content
1261
1262 parser = FeedParser()
1263 parser.feed(for_parser)
1264 mime_response = parser.close()
1265
1266 if not mime_response.is_multipart():
1267 raise BatchError("Response not in multipart/mixed format.", resp=resp,
1268 content=content)
1269
1270 for part in mime_response.get_payload():
1271 request_id = self._header_to_id(part['Content-ID'])
1272 response, content = self._deserialize_response(part.get_payload())
1273 self._responses[request_id] = (response, content)
1274
1275 @util.positional(1)
1276 def execute(self, http=None):
1277 """Execute all the requests as a single batched HTTP request.
1278
1279 Args:
1280 http: httplib2.Http, an http object to be used in place of the one the
1281 HttpRequest request object was constructed with. If one isn't supplied
1282 then use a http object from the requests in this batch.
1283
1284 Returns:
1285 None
1286
1287 Raises:
      httplib2.HttpLib2Error if a transport error has occurred.
1289 googleapiclient.errors.BatchError if the response is the wrong format.
1290 """
1291
1292 # If http is not supplied use the first valid one given in the requests.
1293 if http is None:
1294 for request_id in self._order:
1295 request = self._requests[request_id]
1296 if request is not None:
1297 http = request.http
1298 break
1299
1300 if http is None:
1301 raise ValueError("Missing a valid http object.")
1302
1303 self._execute(http, self._order, self._requests)
1304
1305 # Loop over all the requests and check for 401s. For each 401 request the
1306 # credentials should be refreshed and then sent again in a separate batch.
1307 redo_requests = {}
1308 redo_order = []
1309
1310 for request_id in self._order:
1311 resp, content = self._responses[request_id]
1312 if resp['status'] == '401':
1313 redo_order.append(request_id)
1314 request = self._requests[request_id]
1315 self._refresh_and_apply_credentials(request, http)
1316 redo_requests[request_id] = request
1317
1318 if redo_requests:
1319 self._execute(http, redo_order, redo_requests)
1320
1321 # Now process all callbacks that are erroring, and raise an exception for
1322 # ones that return a non-2xx response? Or add extra parameter to callback
1323 # that contains an HttpError?
1324
1325 for request_id in self._order:
1326 resp, content = self._responses[request_id]
1327
1328 request = self._requests[request_id]
1329 callback = self._callbacks[request_id]
1330
1331 response = None
1332 exception = None
1333 try:
1334 if resp.status >= 300:
1335 raise HttpError(resp, content, uri=request.uri)
1336 response = request.postproc(resp, content)
      except HttpError as e:
        exception = e
1339
1340 if callback is not None:
1341 callback(request_id, response, exception)
1342 if self._callback is not None:
1343 self._callback(request_id, response, exception)
1344
1345
1346class HttpRequestMock(object):
1347 """Mock of HttpRequest.
1348
1349 Do not construct directly, instead use RequestMockBuilder.
1350 """
1351
1352 def __init__(self, resp, content, postproc):
1353 """Constructor for HttpRequestMock
1354
1355 Args:
1356 resp: httplib2.Response, the response to emulate coming from the request
1357 content: string, the response body
1358 postproc: callable, the post processing function usually supplied by
1359 the model class. See model.JsonModel.response() as an example.
1360 """
1361 self.resp = resp
1362 self.content = content
1363 self.postproc = postproc
1364 if resp is None:
1365 self.resp = httplib2.Response({'status': 200, 'reason': 'OK'})
1366 if 'reason' in self.resp:
1367 self.resp.reason = self.resp['reason']
1368
1369 def execute(self, http=None):
1370 """Execute the request.
1371
1372 Same behavior as HttpRequest.execute(), but the response is
1373 mocked and not really from an HTTP request/response.
1374 """
1375 return self.postproc(self.resp, self.content)
1376
1377
1378class RequestMockBuilder(object):
1379 """A simple mock of HttpRequest
1380
1381 Pass in a dictionary to the constructor that maps request methodIds to
1382 tuples of (httplib2.Response, content, opt_expected_body) that should be
1383 returned when that method is called. None may also be passed in for the
1384 httplib2.Response, in which case a 200 OK response will be generated.
1385 If an opt_expected_body (str or dict) is provided, it will be compared to
1386 the body and UnexpectedBodyError will be raised on inequality.
1387
1388 Example:
1389 response = '{"data": {"id": "tag:google.c...'
1390 requestBuilder = RequestMockBuilder(
1391 {
1392 'plus.activities.get': (None, response),
1393 }
1394 )
1395 googleapiclient.discovery.build("plus", "v1", requestBuilder=requestBuilder)
1396
1397 Methods that you do not supply a response for will return a
  200 OK with an empty string as the response content or raise an exception
1399 if check_unexpected is set to True. The methodId is taken from the rpcName
1400 in the discovery document.
1401
1402 For more details see the project wiki.
1403 """
1404
1405 def __init__(self, responses, check_unexpected=False):
1406 """Constructor for RequestMockBuilder
1407
1408 The constructed object should be a callable object
1409 that can replace the class HttpResponse.
1410
1411 responses - A dictionary that maps methodIds into tuples
1412 of (httplib2.Response, content). The methodId
1413 comes from the 'rpcName' field in the discovery
1414 document.
1415 check_unexpected - A boolean setting whether or not UnexpectedMethodError
1416 should be raised on unsupplied method.
1417 """
1418 self.responses = responses
1419 self.check_unexpected = check_unexpected
1420
1421 def __call__(self, http, postproc, uri, method='GET', body=None,
1422 headers=None, methodId=None, resumable=None):
1423 """Implements the callable interface that discovery.build() expects
1424 of requestBuilder, which is to build an object compatible with
1425 HttpRequest.execute(). See that method for the description of the
1426 parameters and the expected response.
1427 """
1428 if methodId in self.responses:
1429 response = self.responses[methodId]
1430 resp, content = response[:2]
1431 if len(response) > 2:
1432 # Test the body against the supplied expected_body.
1433 expected_body = response[2]
1434 if bool(expected_body) != bool(body):
1435 # Not expecting a body and provided one
1436 # or expecting a body and not provided one.
1437 raise UnexpectedBodyError(expected_body, body)
1438 if isinstance(expected_body, str):
          expected_body = json.loads(expected_body)
        body = json.loads(body)
        if body != expected_body:
1442 raise UnexpectedBodyError(expected_body, body)
1443 return HttpRequestMock(resp, content, postproc)
1444 elif self.check_unexpected:
1445 raise UnexpectedMethodError(methodId=methodId)
1446 else:
1447 model = JsonModel(False)
1448 return HttpRequestMock(None, '{}', model.response)
1449
1450
1451class HttpMock(object):
1452 """Mock of httplib2.Http"""
1453
1454 def __init__(self, filename=None, headers=None):
1455 """
1456 Args:
1457 filename: string, absolute filename to read response from
1458 headers: dict, header to return with response
1459 """
1460 if headers is None:
1461 headers = {'status': '200 OK'}
1462 if filename:
      f = open(filename, 'r')
      self.data = f.read()
1465 f.close()
1466 else:
1467 self.data = None
1468 self.response_headers = headers
1469 self.headers = None
1470 self.uri = None
1471 self.method = None
1472 self.body = None
1473 self.headers = None
1474
1475
1476 def request(self, uri,
1477 method='GET',
1478 body=None,
1479 headers=None,
1480 redirections=1,
1481 connection_type=None):
1482 self.uri = uri
1483 self.method = method
1484 self.body = body
1485 self.headers = headers
1486 return httplib2.Response(self.response_headers), self.data
1487
1488
1489class HttpMockSequence(object):
1490 """Mock of httplib2.Http
1491
1492 Mocks a sequence of calls to request returning different responses for each
1493 call. Create an instance initialized with the desired response headers
1494 and content and then use as if an httplib2.Http instance.
1495
1496 http = HttpMockSequence([
1497 ({'status': '401'}, ''),
1498 ({'status': '200'}, '{"access_token":"1/3w","expires_in":3600}'),
1499 ({'status': '200'}, 'echo_request_headers'),
1500 ])
1501 resp, content = http.request("http://examples.com")
1502
1503 There are special values you can pass in for content to trigger
  behaviours that are helpful in testing.
1505
1506 'echo_request_headers' means return the request headers in the response body
1507 'echo_request_headers_as_json' means return the request headers in
1508 the response body
1509 'echo_request_body' means return the request body in the response body
1510 'echo_request_uri' means return the request uri in the response body
1511 """
1512
1513 def __init__(self, iterable):
1514 """
1515 Args:
1516 iterable: iterable, a sequence of pairs of (headers, body)
1517 """
1518 self._iterable = iterable
1519 self.follow_redirects = True
1520
1521 def request(self, uri,
1522 method='GET',
1523 body=None,
1524 headers=None,
1525 redirections=1,
1526 connection_type=None):
1527 resp, content = self._iterable.pop(0)
1528 if content == 'echo_request_headers':
1529 content = headers
1530 elif content == 'echo_request_headers_as_json':
      content = json.dumps(headers)
    elif content == 'echo_request_body':
1533 if hasattr(body, 'read'):
1534 content = body.read()
1535 else:
1536 content = body
1537 elif content == 'echo_request_uri':
1538 content = uri
1539 return httplib2.Response(resp), content
1540
1541
1542def set_user_agent(http, user_agent):
1543 """Set the user-agent on every request.
1544
1545 Args:
1546 http - An instance of httplib2.Http
1547 or something that acts like it.
1548 user_agent: string, the value for the user-agent header.
1549
1550 Returns:
1551 A modified instance of http that was passed in.
1552
1553 Example:
1554
1555 h = httplib2.Http()
1556 h = set_user_agent(h, "my-app-name/6.0")
1557
  Most of the time the user-agent will be set when doing auth; this is for the
  rare cases where you are accessing an unauthenticated endpoint.
1560 """
1561 request_orig = http.request
1562
1563 # The closure that will replace 'httplib2.Http.request'.
1564 def new_request(uri, method='GET', body=None, headers=None,
1565 redirections=httplib2.DEFAULT_MAX_REDIRECTS,
1566 connection_type=None):
1567 """Modify the request headers to add the user-agent."""
1568 if headers is None:
1569 headers = {}
1570 if 'user-agent' in headers:
1571 headers['user-agent'] = user_agent + ' ' + headers['user-agent']
1572 else:
1573 headers['user-agent'] = user_agent
1574 resp, content = request_orig(uri, method, body, headers,
1575 redirections, connection_type)
1576 return resp, content
1577
1578 http.request = new_request
1579 return http
1580
1581
1582def tunnel_patch(http):
1583 """Tunnel PATCH requests over POST.
1584 Args:
1585 http - An instance of httplib2.Http
1586 or something that acts like it.
1587
1588 Returns:
1589 A modified instance of http that was passed in.
1590
1591 Example:
1592
1593 h = httplib2.Http()
    h = tunnel_patch(h)
1595
1596 Useful if you are running on a platform that doesn't support PATCH.
1597 Apply this last if you are using OAuth 1.0, as changing the method
1598 will result in a different signature.
1599 """
1600 request_orig = http.request
1601
1602 # The closure that will replace 'httplib2.Http.request'.
1603 def new_request(uri, method='GET', body=None, headers=None,
1604 redirections=httplib2.DEFAULT_MAX_REDIRECTS,
1605 connection_type=None):
    """Modify the request so that a PATCH is tunneled over POST."""
1607 if headers is None:
1608 headers = {}
1609 if method == 'PATCH':
1610 if 'oauth_token' in headers.get('authorization', ''):
1611 logging.warning(
1612 'OAuth 1.0 request made with Credentials after tunnel_patch.')
1613 headers['x-http-method-override'] = "PATCH"
1614 method = 'POST'
1615 resp, content = request_orig(uri, method, body, headers,
1616 redirections, connection_type)
1617 return resp, content
1618
1619 http.request = new_request
1620 return http
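

# Illustrative sketch only (not part of the library): both helpers above wrap
# http.request with a closure and return the same http object, so they can be
# chained. The user-agent string below is hypothetical.
def _example_wrapped_http():
  """Build an httplib2.Http with a user-agent set and PATCH tunneled."""
  h = httplib2.Http()
  h = set_user_agent(h, 'my-app-name/6.0')
  h = tunnel_patch(h)
  return h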