Package googleapiclient :: Module http
[hide private]
[frames] | [no frames]

Source Code for Module googleapiclient.http

   1  # Copyright 2014 Google Inc. All Rights Reserved. 
   2  # 
   3  # Licensed under the Apache License, Version 2.0 (the "License"); 
   4  # you may not use this file except in compliance with the License. 
   5  # You may obtain a copy of the License at 
   6  # 
   7  #      http://www.apache.org/licenses/LICENSE-2.0 
   8  # 
   9  # Unless required by applicable law or agreed to in writing, software 
  10  # distributed under the License is distributed on an "AS IS" BASIS, 
  11  # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
  12  # See the License for the specific language governing permissions and 
  13  # limitations under the License. 
  14   
  15  """Classes to encapsulate a single HTTP request. 
  16   
  17  The classes implement a command pattern, with every 
  18  object supporting an execute() method that does the 
   19  actual HTTP request. 
  20  """ 
  21   
  22  __author__ = 'jcgregorio@google.com (Joe Gregorio)' 
  23   
  24  import StringIO 
  25  import base64 
  26  import copy 
  27  import gzip 
  28  import httplib2 
  29  import json 
  30  import logging 
  31  import mimeparse 
  32  import mimetypes 
  33  import os 
  34  import random 
  35  import sys 
  36  import time 
  37  import urllib 
  38  import urlparse 
  39  import uuid 
  40   
  41  from email.generator import Generator 
  42  from email.mime.multipart import MIMEMultipart 
  43  from email.mime.nonmultipart import MIMENonMultipart 
  44  from email.parser import FeedParser 
  45  from errors import BatchError 
  46  from errors import HttpError 
  47  from errors import InvalidChunkSizeError 
  48  from errors import ResumableUploadError 
  49  from errors import UnexpectedBodyError 
  50  from errors import UnexpectedMethodError 
  51  from model import JsonModel 
  52  from oauth2client import util 
  53   
  54   
  55  DEFAULT_CHUNK_SIZE = 512*1024 
  56   
  57  MAX_URI_LENGTH = 2048 
class MediaUploadProgress(object):
  """Snapshot of how far a resumable upload has progressed."""

  def __init__(self, resumable_progress, total_size):
    """Constructor.

    Args:
      resumable_progress: int, bytes sent so far.
      total_size: int, total bytes in complete upload, or None if the total
        upload size isn't known ahead of time.
    """
    self.resumable_progress = resumable_progress
    self.total_size = total_size

  def progress(self):
    """Percent of upload completed, as a float.

    Returns:
      the percentage complete as a float, returning 0.0 if the total size of
      the upload is unknown.
    """
    # Guard clause: without a known total there is no meaningful ratio.
    if self.total_size is None:
      return 0.0
    return float(self.resumable_progress) / float(self.total_size)
class MediaDownloadProgress(object):
  """Snapshot of how far a resumable download has progressed."""

  def __init__(self, resumable_progress, total_size):
    """Constructor.

    Args:
      resumable_progress: int, bytes received so far.
      total_size: int, total bytes in complete download.
    """
    self.resumable_progress = resumable_progress
    self.total_size = total_size

  def progress(self):
    """Percent of download completed, as a float.

    Returns:
      the percentage complete as a float, returning 0.0 if the total size of
      the download is unknown.
    """
    # Guard clause: without a known total there is no meaningful ratio.
    if self.total_size is None:
      return 0.0
    return float(self.resumable_progress) / float(self.total_size)
class MediaUpload(object):
  """Describes a media object to upload.

  Base class that defines the interface of MediaUpload subclasses.

  Note that subclasses of MediaUpload may allow you to control the chunksize
  when uploading a media object. It is important to keep the size of the chunk
  as large as possible to keep the upload efficient. Other factors may
  influence the size of the chunk you use, particularly if you are working in
  an environment where individual HTTP requests may have a hardcoded time
  limit, such as under certain classes of requests under Google App Engine.

  Streams are io.Base compatible objects that support seek(). Some MediaUpload
  subclasses support using streams directly to upload data. Support for
  streaming may be indicated by a MediaUpload sub-class and if appropriate for
  a platform that stream will be used for uploading the media object. The
  support for streaming is indicated by has_stream() returning True. The
  stream() method should return an io.Base object that supports seek(). On
  platforms where the underlying httplib module supports streaming, for
  example Python 2.6 and later, the stream will be passed into the http
  library which will result in less memory being used and possibly faster
  uploads.

  If you need to upload media that can't be uploaded using any of the existing
  MediaUpload sub-class then you can sub-class MediaUpload for your particular
  needs.
  """

  def chunksize(self):
    """Chunk size for resumable uploads.

    Returns:
      Chunk size in bytes.
    """
    raise NotImplementedError()

  def mimetype(self):
    """Mime type of the body.

    Returns:
      Mime type.
    """
    return 'application/octet-stream'

  def size(self):
    """Size of upload.

    Returns:
      Size of the body, or None if the size is unknown.
    """
    return None

  def resumable(self):
    """Whether this upload is resumable.

    Returns:
      True if resumable upload or False.
    """
    return False

  def getbytes(self, begin, length):
    """Get bytes from the media.

    Args:
      begin: int, offset from beginning of file.
      length: int, number of bytes to read, starting at begin.

    Returns:
      A string of bytes read. May be shorter than length if EOF was reached
      first.
    """
    # Second parameter renamed from 'end' to 'length': the docstring and the
    # concrete implementation in MediaIoBaseUpload both treat it as a byte
    # count, not an end offset. Abstract method, so no behavior changes.
    raise NotImplementedError()

  def has_stream(self):
    """Does the underlying upload support a streaming interface.

    Streaming means it is an io.IOBase subclass that supports seek, i.e.
    seekable() returns True.

    Returns:
      True if the call to stream() will return an instance of a seekable
      io.Base subclass.
    """
    return False

  def stream(self):
    """A stream interface to the data being uploaded.

    Returns:
      The returned value is an io.IOBase subclass that supports seek, i.e.
      seekable() returns True.
    """
    raise NotImplementedError()

  @util.positional(1)
  def _to_json(self, strip=None):
    """Utility function for creating a JSON representation of a MediaUpload.

    Args:
      strip: array, An array of names of members to not include in the JSON.

    Returns:
      string, a JSON representation of this instance, suitable to pass to
      from_json().
    """
    t = type(self)
    d = copy.copy(self.__dict__)
    if strip is not None:
      for member in strip:
        del d[member]
    # Record the concrete class and module so new_from_json() can re-import
    # and dispatch to the right from_json() implementation.
    d['_class'] = t.__name__
    d['_module'] = t.__module__
    return json.dumps(d)

  def to_json(self):
    """Create a JSON representation of an instance of MediaUpload.

    Returns:
      string, a JSON representation of this instance, suitable to pass to
      from_json().
    """
    return self._to_json()

  @classmethod
  def new_from_json(cls, s):
    """Utility class method to instantiate a MediaUpload subclass from a JSON
    representation produced by to_json().

    Args:
      s: string, JSON from to_json().

    Returns:
      An instance of the subclass of MediaUpload that was serialized with
      to_json().
    """
    data = json.loads(s)
    # Find and call the right classmethod from_json() to restore the object.
    module = data['_module']
    m = __import__(module, fromlist=module.split('.')[:-1])
    kls = getattr(m, data['_class'])
    from_json = getattr(kls, 'from_json')
    return from_json(s)
class MediaIoBaseUpload(MediaUpload):
  """A MediaUpload for a io.Base objects.

  Note that the Python file object is compatible with io.Base and can be used
  with this class also.

    fh = io.BytesIO('...Some data to upload...')
    media = MediaIoBaseUpload(fh, mimetype='image/png',
      chunksize=1024*1024, resumable=True)
    farm.animals().insert(
        id='cow',
        name='cow.png',
        media_body=media).execute()

  Depending on the platform you are working on, you may pass -1 as the
  chunksize, which indicates that the entire file should be uploaded in a
  single request. If the underlying platform supports streams, such as Python
  2.6 or later, then this can be very efficient as it avoids multiple
  connections, and also avoids loading the entire file into memory before
  sending it. Note that Google App Engine has a 5MB limit on request size, so
  you should never set your chunksize larger than 5MB, or to -1.
  """

  @util.positional(3)
  def __init__(self, fd, mimetype, chunksize=DEFAULT_CHUNK_SIZE,
               resumable=False):
    """Constructor.

    Args:
      fd: io.Base or file object, The source of the bytes to upload. MUST be
        opened in blocking mode, do not use streams opened in non-blocking
        mode. The given stream must be seekable, that is, it must be able to
        call seek() on fd.
      mimetype: string, Mime-type of the file.
      chunksize: int, File will be uploaded in chunks of this many bytes. Only
        used if resumable=True. Pass in a value of -1 if the file is to be
        uploaded as a single chunk. Note that Google App Engine has a 5MB
        limit on request size, so you should never set your chunksize larger
        than 5MB, or to -1.
      resumable: bool, True if this is a resumable upload. False means upload
        in a single request.
    """
    super(MediaIoBaseUpload, self).__init__()
    self._fd = fd
    self._mimetype = mimetype
    # The only valid chunk sizes are -1 (whole file in one request) or a
    # positive byte count.
    if chunksize != -1 and chunksize <= 0:
      raise InvalidChunkSizeError()
    self._chunksize = chunksize
    self._resumable = resumable

    # Determine the total size once, by seeking to the end of the stream.
    self._fd.seek(0, os.SEEK_END)
    self._size = self._fd.tell()

  def chunksize(self):
    """Chunk size for resumable uploads.

    Returns:
      Chunk size in bytes.
    """
    return self._chunksize

  def mimetype(self):
    """Mime type of the body.

    Returns:
      Mime type.
    """
    return self._mimetype

  def size(self):
    """Size of upload.

    Returns:
      Size of the body, or None if the size is unknown.
    """
    return self._size

  def resumable(self):
    """Whether this upload is resumable.

    Returns:
      True if resumable upload or False.
    """
    return self._resumable

  def getbytes(self, begin, length):
    """Get bytes from the media.

    Args:
      begin: int, offset from beginning of file.
      length: int, number of bytes to read, starting at begin.

    Returns:
      A string of bytes read. May be shorter than length if EOF was reached
      first.
    """
    self._fd.seek(begin)
    return self._fd.read(length)

  def has_stream(self):
    """Does the underlying upload support a streaming interface.

    Streaming means it is an io.IOBase subclass that supports seek, i.e.
    seekable() returns True.

    Returns:
      True if the call to stream() will return an instance of a seekable
      io.Base subclass.
    """
    return True

  def stream(self):
    """A stream interface to the data being uploaded.

    Returns:
      The returned value is an io.IOBase subclass that supports seek, i.e.
      seekable() returns True.
    """
    return self._fd

  def to_json(self):
    """This upload type is not serializable."""
    raise NotImplementedError('MediaIoBaseUpload is not serializable.')
class MediaFileUpload(MediaIoBaseUpload):
  """A MediaUpload for a file.

  Construct a MediaFileUpload and pass as the media_body parameter of the
  method. For example, if we had a service that allowed uploading images:

    media = MediaFileUpload('cow.png', mimetype='image/png',
      chunksize=1024*1024, resumable=True)
    farm.animals().insert(
        id='cow',
        name='cow.png',
        media_body=media).execute()

  Depending on the platform you are working on, you may pass -1 as the
  chunksize, which indicates that the entire file should be uploaded in a
  single request. If the underlying platform supports streams, such as Python
  2.6 or later, then this can be very efficient as it avoids multiple
  connections, and also avoids loading the entire file into memory before
  sending it. Note that Google App Engine has a 5MB limit on request size, so
  you should never set your chunksize larger than 5MB, or to -1.
  """

  @util.positional(2)
  def __init__(self, filename, mimetype=None, chunksize=DEFAULT_CHUNK_SIZE,
               resumable=False):
    """Constructor.

    Args:
      filename: string, Name of the file.
      mimetype: string, Mime-type of the file. If None then a mime-type will
        be guessed from the file extension.
      chunksize: int, File will be uploaded in chunks of this many bytes. Only
        used if resumable=True. Pass in a value of -1 if the file is to be
        uploaded in a single chunk. Note that Google App Engine has a 5MB
        limit on request size, so you should never set your chunksize larger
        than 5MB, or to -1.
      resumable: bool, True if this is a resumable upload. False means upload
        in a single request.
    """
    self._filename = filename
    # Fall back to guessing the mime type from the file extension.
    if mimetype is None:
      (mimetype, encoding) = mimetypes.guess_type(filename)
    fd = open(self._filename, 'rb')
    super(MediaFileUpload, self).__init__(fd, mimetype, chunksize=chunksize,
                                          resumable=resumable)

  def to_json(self):
    """Creating a JSON representation of an instance of MediaFileUpload.

    Returns:
      string, a JSON representation of this instance, suitable to pass to
      from_json().
    """
    # The open file handle cannot be serialized, so drop it; from_json()
    # re-opens the file by name.
    return self._to_json(strip=['_fd'])

  @staticmethod
  def from_json(s):
    """Restore a MediaFileUpload serialized by to_json()."""
    d = json.loads(s)
    return MediaFileUpload(d['_filename'], mimetype=d['_mimetype'],
                           chunksize=d['_chunksize'],
                           resumable=d['_resumable'])
class MediaInMemoryUpload(MediaIoBaseUpload):
  """MediaUpload for a chunk of bytes.

  DEPRECATED: Use MediaIoBaseUpload with either io.TextIOBase or StringIO for
  the stream.
  """

  @util.positional(2)
  def __init__(self, body, mimetype='application/octet-stream',
               chunksize=DEFAULT_CHUNK_SIZE, resumable=False):
    """Create a new MediaInMemoryUpload.

    DEPRECATED: Use MediaIoBaseUpload with either io.TextIOBase or StringIO
    for the stream.

    Args:
      body: string, Bytes of body content.
      mimetype: string, Mime-type of the file or default of
        'application/octet-stream'.
      chunksize: int, File will be uploaded in chunks of this many bytes. Only
        used if resumable=True.
      resumable: bool, True if this is a resumable upload. False means upload
        in a single request.
    """
    # Wrap the in-memory bytes in a stream and delegate everything else to
    # the io-based base class.
    super(MediaInMemoryUpload, self).__init__(
        StringIO.StringIO(body), mimetype, chunksize=chunksize,
        resumable=resumable)
class MediaIoBaseDownload(object):
  """Download media resources.

  Note that the Python file object is compatible with io.Base and can be used
  with this class also.

  Example:
    request = farms.animals().get_media(id='cow')
    fh = io.FileIO('cow.png', mode='wb')
    downloader = MediaIoBaseDownload(fh, request, chunksize=1024*1024)

    done = False
    while done is False:
      status, done = downloader.next_chunk()
      if status:
        print "Download %d%%." % int(status.progress() * 100)
    print "Download Complete!"
  """

  @util.positional(3)
  def __init__(self, fd, request, chunksize=DEFAULT_CHUNK_SIZE):
    """Constructor.

    Args:
      fd: io.Base or file object, The stream in which to write the downloaded
        bytes.
      request: googleapiclient.http.HttpRequest, the media request to perform
        in chunks.
      chunksize: int, File will be downloaded in chunks of this many bytes.
    """
    self._fd = fd
    self._request = request
    self._uri = request.uri
    self._chunksize = chunksize
    self._progress = 0
    self._total_size = None
    self._done = False

    # Stubs for testing.
    self._sleep = time.sleep
    self._rand = random.random

  @util.positional(1)
  def next_chunk(self, num_retries=0):
    """Get the next chunk of the download.

    Args:
      num_retries: Integer, number of times to retry 500's with randomized
            exponential backoff. If all retries fail, the raised HttpError
            represents the last request. If zero (default), we attempt the
            request only once.

    Returns:
      (status, done): (MediaDownloadProgress, boolean)
         The value of 'done' will be True when the media has been fully
         downloaded.

    Raises:
      googleapiclient.errors.HttpError if the response was not a 2xx.
      httplib2.HttpLib2Error if a transport error has occured.
    """
    # HTTP byte ranges are inclusive at both ends (RFC 7233), so request
    # bytes [progress, progress + chunksize - 1] to fetch exactly chunksize
    # bytes. The previous form omitted the -1 and fetched one extra byte
    # per chunk.
    headers = {
        'range': 'bytes=%d-%d' % (
            self._progress, self._progress + self._chunksize - 1)
        }
    http = self._request.http

    for retry_num in xrange(num_retries + 1):
      if retry_num > 0:
        self._sleep(self._rand() * 2**retry_num)
        # resp is bound by the previous loop iteration whenever
        # retry_num > 0, so this reference is safe.
        logging.warning(
            'Retry #%d for media download: GET %s, following status: %d'
            % (retry_num, self._uri, resp.status))

      resp, content = http.request(self._uri, headers=headers)
      if resp.status < 500:
        break

    if resp.status in [200, 206]:
      # Follow a server-provided relocation of the media resource.
      if 'content-location' in resp and resp['content-location'] != self._uri:
        self._uri = resp['content-location']
      self._progress += len(content)
      self._fd.write(content)

      if 'content-range' in resp:
        # 'content-range' is 'bytes start-end/total'; take the total.
        content_range = resp['content-range']
        length = content_range.rsplit('/', 1)[1]
        self._total_size = int(length)
      elif 'content-length' in resp:
        self._total_size = int(resp['content-length'])

      # NOTE(review): if the server sends neither content-range nor
      # content-length, _total_size stays None and 'done' is never set —
      # confirm the servers in use always send one of these headers.
      if self._progress == self._total_size:
        self._done = True
      return MediaDownloadProgress(self._progress, self._total_size), self._done
    else:
      raise HttpError(resp, content, uri=self._uri)
571 572 -class _StreamSlice(object):
573 """Truncated stream. 574 575 Takes a stream and presents a stream that is a slice of the original stream. 576 This is used when uploading media in chunks. In later versions of Python a 577 stream can be passed to httplib in place of the string of data to send. The 578 problem is that httplib just blindly reads to the end of the stream. This 579 wrapper presents a virtual stream that only reads to the end of the chunk. 580 """ 581
582 - def __init__(self, stream, begin, chunksize):
583 """Constructor. 584 585 Args: 586 stream: (io.Base, file object), the stream to wrap. 587 begin: int, the seek position the chunk begins at. 588 chunksize: int, the size of the chunk. 589 """ 590 self._stream = stream 591 self._begin = begin 592 self._chunksize = chunksize 593 self._stream.seek(begin)
594
595 - def read(self, n=-1):
596 """Read n bytes. 597 598 Args: 599 n, int, the number of bytes to read. 600 601 Returns: 602 A string of length 'n', or less if EOF is reached. 603 """ 604 # The data left available to read sits in [cur, end) 605 cur = self._stream.tell() 606 end = self._begin + self._chunksize 607 if n == -1 or cur + n > end: 608 n = end - cur 609 return self._stream.read(n)
610
class HttpRequest(object):
  """Encapsulates a single HTTP request."""

  @util.positional(4)
  def __init__(self, http, postproc, uri,
               method='GET',
               body=None,
               headers=None,
               methodId=None,
               resumable=None):
    """Constructor for an HttpRequest.

    Args:
      http: httplib2.Http, the transport object to use to make a request
      postproc: callable, called on the HTTP response and content to transform
                it into a data object before returning, or raising an
                exception on an error.
      uri: string, the absolute URI to send the request to
      method: string, the HTTP method to use
      body: string, the request body of the HTTP request,
      headers: dict, the HTTP request headers
      methodId: string, a unique identifier for the API method being called.
      resumable: MediaUpload, None if this is not a resumable request.
    """
    self.uri = uri
    self.method = method
    self.body = body
    self.headers = headers or {}
    self.methodId = methodId
    self.http = http
    self.postproc = postproc
    self.resumable = resumable
    self.response_callbacks = []
    self._in_error_state = False

    # Pull the multipart boundary out of the content-type header.
    # Fixed: read from self.headers (always a dict) instead of the raw
    # 'headers' argument, which defaults to None and previously raised
    # AttributeError when no headers were supplied.
    major, minor, params = mimeparse.parse_mime_type(
        self.headers.get('content-type', 'application/json'))

    # The size of the non-media part of the request.
    self.body_size = len(self.body or '')

    # The resumable URI to send chunks to.
    self.resumable_uri = None

    # The bytes that have been uploaded.
    self.resumable_progress = 0

    # Stubs for testing.
    self._rand = random.random
    self._sleep = time.sleep

  @util.positional(1)
  def execute(self, http=None, num_retries=0):
    """Execute the request.

    Args:
      http: httplib2.Http, an http object to be used in place of the
            one the HttpRequest request object was constructed with.
      num_retries: Integer, number of times to retry 500's with randomized
            exponential backoff. If all retries fail, the raised HttpError
            represents the last request. If zero (default), we attempt the
            request only once.

    Returns:
      A deserialized object model of the response body as determined
      by the postproc.

    Raises:
      googleapiclient.errors.HttpError if the response was not a 2xx.
      httplib2.HttpLib2Error if a transport error has occured.
    """
    if http is None:
      http = self.http

    # Resumable requests are driven chunk by chunk until a body appears.
    if self.resumable:
      body = None
      while body is None:
        _, body = self.next_chunk(http=http, num_retries=num_retries)
      return body

    # Non-resumable case.

    if 'content-length' not in self.headers:
      self.headers['content-length'] = str(self.body_size)
    # If the request URI is too long then turn it into a POST request.
    if len(self.uri) > MAX_URI_LENGTH and self.method == 'GET':
      self.method = 'POST'
      self.headers['x-http-method-override'] = 'GET'
      self.headers['content-type'] = 'application/x-www-form-urlencoded'
      parsed = urlparse.urlparse(self.uri)
      self.uri = urlparse.urlunparse(
          (parsed.scheme, parsed.netloc, parsed.path, parsed.params, None,
           None)
          )
      # The query string becomes the POST body.
      self.body = parsed.query
      self.headers['content-length'] = str(len(self.body))

    # Handle retries for server-side errors.
    for retry_num in xrange(num_retries + 1):
      if retry_num > 0:
        self._sleep(self._rand() * 2**retry_num)
        # resp is bound by the previous loop iteration when retry_num > 0.
        logging.warning('Retry #%d for request: %s %s, following status: %d'
                        % (retry_num, self.method, self.uri, resp.status))

      resp, content = http.request(str(self.uri), method=str(self.method),
                                   body=self.body, headers=self.headers)
      if resp.status < 500:
        break

    for callback in self.response_callbacks:
      callback(resp)
    if resp.status >= 300:
      raise HttpError(resp, content, uri=self.uri)
    return self.postproc(resp, content)

  @util.positional(2)
  def add_response_callback(self, cb):
    """add_response_headers_callback

    Args:
      cb: Callback to be called on receiving the response headers, of
        signature:

        def cb(resp):
          # Where resp is an instance of httplib2.Response
    """
    self.response_callbacks.append(cb)

  @util.positional(1)
  def next_chunk(self, http=None, num_retries=0):
    """Execute the next step of a resumable upload.

    Can only be used if the method being executed supports media uploads and
    the MediaUpload object passed in was flagged as using resumable upload.

    Example:

      media = MediaFileUpload('cow.png', mimetype='image/png',
                              chunksize=1000, resumable=True)
      request = farm.animals().insert(
          id='cow',
          name='cow.png',
          media_body=media)

      response = None
      while response is None:
        status, response = request.next_chunk()
        if status:
          print "Upload %d%% complete." % int(status.progress() * 100)


    Args:
      http: httplib2.Http, an http object to be used in place of the
            one the HttpRequest request object was constructed with.
      num_retries: Integer, number of times to retry 500's with randomized
            exponential backoff. If all retries fail, the raised HttpError
            represents the last request. If zero (default), we attempt the
            request only once.

    Returns:
      (status, body): (ResumableMediaStatus, object)
         The body will be None until the resumable media is fully uploaded.

    Raises:
      googleapiclient.errors.HttpError if the response was not a 2xx.
      httplib2.HttpLib2Error if a transport error has occured.
    """
    if http is None:
      http = self.http

    # '*' tells the server the total size is not yet known.
    if self.resumable.size() is None:
      size = '*'
    else:
      size = str(self.resumable.size())

    if self.resumable_uri is None:
      # First request: start the resumable session and learn the upload URI.
      start_headers = copy.copy(self.headers)
      start_headers['X-Upload-Content-Type'] = self.resumable.mimetype()
      if size != '*':
        start_headers['X-Upload-Content-Length'] = size
      start_headers['content-length'] = str(self.body_size)

      for retry_num in xrange(num_retries + 1):
        if retry_num > 0:
          self._sleep(self._rand() * 2**retry_num)
          # resp is bound by the previous loop iteration when retry_num > 0.
          logging.warning(
              'Retry #%d for resumable URI request: %s %s, following status: %d'
              % (retry_num, self.method, self.uri, resp.status))

        resp, content = http.request(self.uri, method=self.method,
                                     body=self.body,
                                     headers=start_headers)
        if resp.status < 500:
          break

      if resp.status == 200 and 'location' in resp:
        self.resumable_uri = resp['location']
      else:
        raise ResumableUploadError(resp, content)
    elif self._in_error_state:
      # If we are in an error state then query the server for current state
      # of the upload by sending an empty PUT and reading the 'range' header
      # in the response.
      headers = {
          'Content-Range': 'bytes */%s' % size,
          'content-length': '0'
          }
      resp, content = http.request(self.resumable_uri, 'PUT',
                                   headers=headers)
      status, body = self._process_response(resp, content)
      if body:
        # The upload was complete.
        return (status, body)

    # The httplib.request method can take streams for the body parameter, but
    # only in Python 2.6 or later. If a stream is available under those
    # conditions then use it as the body argument.
    if self.resumable.has_stream() and sys.version_info[1] >= 6:
      data = self.resumable.stream()
      if self.resumable.chunksize() == -1:
        # Single-chunk upload: send everything from the current offset.
        data.seek(self.resumable_progress)
        chunk_end = self.resumable.size() - self.resumable_progress - 1
      else:
        # Doing chunking with a stream, so wrap a slice of the stream.
        data = _StreamSlice(data, self.resumable_progress,
                            self.resumable.chunksize())
        chunk_end = min(
            self.resumable_progress + self.resumable.chunksize() - 1,
            self.resumable.size() - 1)
    else:
      data = self.resumable.getbytes(
          self.resumable_progress, self.resumable.chunksize())

      # A short read implies that we are at EOF, so finish the upload.
      if len(data) < self.resumable.chunksize():
        size = str(self.resumable_progress + len(data))

      chunk_end = self.resumable_progress + len(data) - 1

    headers = {
        'Content-Range': 'bytes %d-%d/%s' % (
            self.resumable_progress, chunk_end, size),
        # Must set the content-length header here because httplib can't
        # calculate the size when working with _StreamSlice.
        'Content-Length': str(chunk_end - self.resumable_progress + 1)
        }

    for retry_num in xrange(num_retries + 1):
      if retry_num > 0:
        self._sleep(self._rand() * 2**retry_num)
        # resp is bound by the previous loop iteration when retry_num > 0.
        logging.warning(
            'Retry #%d for media upload: %s %s, following status: %d'
            % (retry_num, self.method, self.uri, resp.status))

      try:
        resp, content = http.request(self.resumable_uri, method='PUT',
                                     body=data,
                                     headers=headers)
      except:
        # Deliberately broad: remember the error state so the next call
        # re-queries the server, then propagate the original exception.
        self._in_error_state = True
        raise
      if resp.status < 500:
        break

    return self._process_response(resp, content)

  def _process_response(self, resp, content):
    """Process the response from a single chunk upload.

    Args:
      resp: httplib2.Response, the response object.
      content: string, the content of the response.

    Returns:
      (status, body): (ResumableMediaStatus, object)
         The body will be None until the resumable media is fully uploaded.

    Raises:
      googleapiclient.errors.HttpError if the response was not a 2xx or a 308.
    """
    if resp.status in [200, 201]:
      self._in_error_state = False
      return None, self.postproc(resp, content)
    elif resp.status == 308:
      self._in_error_state = False
      # A "308 Resume Incomplete" indicates we are not done.
      self.resumable_progress = int(resp['range'].split('-')[1]) + 1
      if 'location' in resp:
        self.resumable_uri = resp['location']
    else:
      self._in_error_state = True
      raise HttpError(resp, content, uri=self.uri)

    return (MediaUploadProgress(self.resumable_progress, self.resumable.size()),
            None)

  def to_json(self):
    """Returns a JSON representation of the HttpRequest."""
    d = copy.copy(self.__dict__)
    if d['resumable'] is not None:
      d['resumable'] = self.resumable.to_json()
    # Drop the members that cannot be serialized; from_json() restores them.
    del d['http']
    del d['postproc']
    del d['_sleep']
    del d['_rand']

    return json.dumps(d)

  @staticmethod
  def from_json(s, http, postproc):
    """Returns an HttpRequest populated with info from a JSON object."""
    d = json.loads(s)
    if d['resumable'] is not None:
      d['resumable'] = MediaUpload.new_from_json(d['resumable'])
    return HttpRequest(
        http,
        postproc,
        uri=d['uri'],
        method=d['method'],
        body=d['body'],
        headers=d['headers'],
        methodId=d['methodId'],
        resumable=d['resumable'])
936 937 -class BatchHttpRequest(object):
938 """Batches multiple HttpRequest objects into a single HTTP request. 939 940 Example: 941 from googleapiclient.http import BatchHttpRequest 942 943 def list_animals(request_id, response, exception): 944 \"\"\"Do something with the animals list response.\"\"\" 945 if exception is not None: 946 # Do something with the exception. 947 pass 948 else: 949 # Do something with the response. 950 pass 951 952 def list_farmers(request_id, response, exception): 953 \"\"\"Do something with the farmers list response.\"\"\" 954 if exception is not None: 955 # Do something with the exception. 956 pass 957 else: 958 # Do something with the response. 959 pass 960 961 service = build('farm', 'v2') 962 963 batch = BatchHttpRequest() 964 965 batch.add(service.animals().list(), list_animals) 966 batch.add(service.farmers().list(), list_farmers) 967 batch.execute(http=http) 968 """ 969 970 @util.positional(1)
971 - def __init__(self, callback=None, batch_uri=None):
972 """Constructor for a BatchHttpRequest. 973 974 Args: 975 callback: callable, A callback to be called for each response, of the 976 form callback(id, response, exception). The first parameter is the 977 request id, and the second is the deserialized response object. The 978 third is an googleapiclient.errors.HttpError exception object if an HTTP error 979 occurred while processing the request, or None if no error occurred. 980 batch_uri: string, URI to send batch requests to. 981 """ 982 if batch_uri is None: 983 batch_uri = 'https://www.googleapis.com/batch' 984 self._batch_uri = batch_uri 985 986 # Global callback to be called for each individual response in the batch. 987 self._callback = callback 988 989 # A map from id to request. 990 self._requests = {} 991 992 # A map from id to callback. 993 self._callbacks = {} 994 995 # List of request ids, in the order in which they were added. 996 self._order = [] 997 998 # The last auto generated id. 999 self._last_auto_id = 0 1000 1001 # Unique ID on which to base the Content-ID headers. 1002 self._base_id = None 1003 1004 # A map from request id to (httplib2.Response, content) response pairs 1005 self._responses = {} 1006 1007 # A map of id(Credentials) that have been refreshed. 1008 self._refreshed_credentials = {}
1009
1010 - def _refresh_and_apply_credentials(self, request, http):
1011 """Refresh the credentials and apply to the request. 1012 1013 Args: 1014 request: HttpRequest, the request. 1015 http: httplib2.Http, the global http object for the batch. 1016 """ 1017 # For the credentials to refresh, but only once per refresh_token 1018 # If there is no http per the request then refresh the http passed in 1019 # via execute() 1020 creds = None 1021 if request.http is not None and hasattr(request.http.request, 1022 'credentials'): 1023 creds = request.http.request.credentials 1024 elif http is not None and hasattr(http.request, 'credentials'): 1025 creds = http.request.credentials 1026 if creds is not None: 1027 if id(creds) not in self._refreshed_credentials: 1028 creds.refresh(http) 1029 self._refreshed_credentials[id(creds)] = 1 1030 1031 # Only apply the credentials if we are using the http object passed in, 1032 # otherwise apply() will get called during _serialize_request(). 1033 if request.http is None or not hasattr(request.http.request, 1034 'credentials'): 1035 creds.apply(request.headers)
1036
1037 - def _id_to_header(self, id_):
1038 """Convert an id to a Content-ID header value. 1039 1040 Args: 1041 id_: string, identifier of individual request. 1042 1043 Returns: 1044 A Content-ID header with the id_ encoded into it. A UUID is prepended to 1045 the value because Content-ID headers are supposed to be universally 1046 unique. 1047 """ 1048 if self._base_id is None: 1049 self._base_id = uuid.uuid4() 1050 1051 return '<%s+%s>' % (self._base_id, urllib.quote(id_))
1052
1053 - def _header_to_id(self, header):
1054 """Convert a Content-ID header value to an id. 1055 1056 Presumes the Content-ID header conforms to the format that _id_to_header() 1057 returns. 1058 1059 Args: 1060 header: string, Content-ID header value. 1061 1062 Returns: 1063 The extracted id value. 1064 1065 Raises: 1066 BatchError if the header is not in the expected format. 1067 """ 1068 if header[0] != '<' or header[-1] != '>': 1069 raise BatchError("Invalid value for Content-ID: %s" % header) 1070 if '+' not in header: 1071 raise BatchError("Invalid value for Content-ID: %s" % header) 1072 base, id_ = header[1:-1].rsplit('+', 1) 1073 1074 return urllib.unquote(id_)
1075
  def _serialize_request(self, request):
    """Convert an HttpRequest object into a string.

    Args:
      request: HttpRequest, the request to serialize.

    Returns:
      The request as a string in application/http format.
    """
    # Construct status line. Scheme and netloc are dropped so the request
    # line is relative, as required inside a batch part.
    parsed = urlparse.urlparse(request.uri)
    request_line = urlparse.urlunparse(
        (None, None, parsed.path, parsed.params, parsed.query, None)
    )
    status_line = request.method + ' ' + request_line + ' HTTP/1.1\n'
    major, minor = request.headers.get('content-type', 'application/json').split('/')
    msg = MIMENonMultipart(major, minor)
    headers = request.headers.copy()

    # Sign this part with the request's own credentials (if any); the
    # batch-level credentials were handled in _refresh_and_apply_credentials.
    if request.http is not None and hasattr(request.http.request,
        'credentials'):
      request.http.request.credentials.apply(headers)

    # MIMENonMultipart adds its own Content-Type header.
    if 'content-type' in headers:
      del headers['content-type']

    # Copy the remaining request headers onto the MIME message.
    for key, value in headers.iteritems():
      msg[key] = value
    msg['Host'] = parsed.netloc
    msg.set_unixfrom(None)

    if request.body is not None:
      msg.set_payload(request.body)
      msg['content-length'] = str(len(request.body))

    # Serialize the mime message.
    fp = StringIO.StringIO()
    # maxheaderlen=0 means don't line wrap headers.
    g = Generator(fp, maxheaderlen=0)
    g.flatten(msg, unixfrom=False)
    body = fp.getvalue()

    # Strip off the \n\n that the MIME lib tacks onto the end of the payload.
    # Only needed when there is no body, since set_payload was skipped above.
    if request.body is None:
      body = body[:-2]

    return status_line.encode('utf-8') + body
1124
1125 - def _deserialize_response(self, payload):
1126 """Convert string into httplib2 response and content. 1127 1128 Args: 1129 payload: string, headers and body as a string. 1130 1131 Returns: 1132 A pair (resp, content), such as would be returned from httplib2.request. 1133 """ 1134 # Strip off the status line 1135 status_line, payload = payload.split('\n', 1) 1136 protocol, status, reason = status_line.split(' ', 2) 1137 1138 # Parse the rest of the response 1139 parser = FeedParser() 1140 parser.feed(payload) 1141 msg = parser.close() 1142 msg['status'] = status 1143 1144 # Create httplib2.Response from the parsed headers. 1145 resp = httplib2.Response(msg) 1146 resp.reason = reason 1147 resp.version = int(protocol.split('/', 1)[1].replace('.', '')) 1148 1149 content = payload.split('\r\n\r\n', 1)[1] 1150 1151 return resp, content
1152
1153 - def _new_id(self):
1154 """Create a new id. 1155 1156 Auto incrementing number that avoids conflicts with ids already used. 1157 1158 Returns: 1159 string, a new unique id. 1160 """ 1161 self._last_auto_id += 1 1162 while str(self._last_auto_id) in self._requests: 1163 self._last_auto_id += 1 1164 return str(self._last_auto_id)
1165 1166 @util.positional(2)
1167 - def add(self, request, callback=None, request_id=None):
1168 """Add a new request. 1169 1170 Every callback added will be paired with a unique id, the request_id. That 1171 unique id will be passed back to the callback when the response comes back 1172 from the server. The default behavior is to have the library generate it's 1173 own unique id. If the caller passes in a request_id then they must ensure 1174 uniqueness for each request_id, and if they are not an exception is 1175 raised. Callers should either supply all request_ids or nevery supply a 1176 request id, to avoid such an error. 1177 1178 Args: 1179 request: HttpRequest, Request to add to the batch. 1180 callback: callable, A callback to be called for this response, of the 1181 form callback(id, response, exception). The first parameter is the 1182 request id, and the second is the deserialized response object. The 1183 third is an googleapiclient.errors.HttpError exception object if an HTTP error 1184 occurred while processing the request, or None if no errors occurred. 1185 request_id: string, A unique id for the request. The id will be passed to 1186 the callback with the response. 1187 1188 Returns: 1189 None 1190 1191 Raises: 1192 BatchError if a media request is added to a batch. 1193 KeyError is the request_id is not unique. 1194 """ 1195 if request_id is None: 1196 request_id = self._new_id() 1197 if request.resumable is not None: 1198 raise BatchError("Media requests cannot be used in a batch request.") 1199 if request_id in self._requests: 1200 raise KeyError("A request with this ID already exists: %s" % request_id) 1201 self._requests[request_id] = request 1202 self._callbacks[request_id] = callback 1203 self._order.append(request_id)
1204
  def _execute(self, http, order, requests):
    """Serialize batch request, send to server, process response.

    Args:
      http: httplib2.Http, an http object to be used to make the request with.
      order: list, list of request ids in the order they were added to the
        batch.
      requests: dict, mapping of request id to the HttpRequest objects to
        send.

    Raises:
      httplib2.HttpLib2Error if a transport error has occurred.
      googleapiclient.errors.BatchError if the response is the wrong format.
    """
    message = MIMEMultipart('mixed')
    # Message should not write out its own headers; the outer HTTP request
    # carries them instead.
    setattr(message, '_write_headers', lambda self: None)

    # Add all the individual requests, each as an application/http part
    # tagged with a Content-ID so responses can be matched back up.
    for request_id in order:
      request = requests[request_id]

      msg = MIMENonMultipart('application', 'http')
      msg['Content-Transfer-Encoding'] = 'binary'
      msg['Content-ID'] = self._id_to_header(request_id)

      body = self._serialize_request(request)
      msg.set_payload(body)
      message.attach(msg)

    # encode the body: note that we can't use `as_string`, because
    # it plays games with `From ` lines.
    fp = StringIO.StringIO()
    g = Generator(fp, mangle_from_=False)
    g.flatten(message, unixfrom=False)
    body = fp.getvalue()

    headers = {}
    headers['content-type'] = ('multipart/mixed; '
                               'boundary="%s"') % message.get_boundary()

    resp, content = http.request(self._batch_uri, method='POST', body=body,
                                 headers=headers)

    if resp.status >= 300:
      raise HttpError(resp, content, uri=self._batch_uri)

    # Now break out the individual responses and store each one.
    # NOTE(review): `boundary` is never used after this split — presumably
    # left over from an earlier implementation; confirm before removing.
    boundary, _ = content.split(None, 1)

    # Prepend with a content-type header so FeedParser can handle it.
    header = 'content-type: %s\r\n\r\n' % resp['content-type']
    for_parser = header + content

    parser = FeedParser()
    parser.feed(for_parser)
    mime_response = parser.close()

    if not mime_response.is_multipart():
      raise BatchError("Response not in multipart/mixed format.", resp=resp,
                       content=content)

    # Store each part's (response, content) under its original request id.
    for part in mime_response.get_payload():
      request_id = self._header_to_id(part['Content-ID'])
      response, content = self._deserialize_response(part.get_payload())
      self._responses[request_id] = (response, content)
1270 1271 @util.positional(1)
1272 - def execute(self, http=None):
1273 """Execute all the requests as a single batched HTTP request. 1274 1275 Args: 1276 http: httplib2.Http, an http object to be used in place of the one the 1277 HttpRequest request object was constructed with. If one isn't supplied 1278 then use a http object from the requests in this batch. 1279 1280 Returns: 1281 None 1282 1283 Raises: 1284 httplib2.HttpLib2Error if a transport error has occured. 1285 googleapiclient.errors.BatchError if the response is the wrong format. 1286 """ 1287 1288 # If http is not supplied use the first valid one given in the requests. 1289 if http is None: 1290 for request_id in self._order: 1291 request = self._requests[request_id] 1292 if request is not None: 1293 http = request.http 1294 break 1295 1296 if http is None: 1297 raise ValueError("Missing a valid http object.") 1298 1299 self._execute(http, self._order, self._requests) 1300 1301 # Loop over all the requests and check for 401s. For each 401 request the 1302 # credentials should be refreshed and then sent again in a separate batch. 1303 redo_requests = {} 1304 redo_order = [] 1305 1306 for request_id in self._order: 1307 resp, content = self._responses[request_id] 1308 if resp['status'] == '401': 1309 redo_order.append(request_id) 1310 request = self._requests[request_id] 1311 self._refresh_and_apply_credentials(request, http) 1312 redo_requests[request_id] = request 1313 1314 if redo_requests: 1315 self._execute(http, redo_order, redo_requests) 1316 1317 # Now process all callbacks that are erroring, and raise an exception for 1318 # ones that return a non-2xx response? Or add extra parameter to callback 1319 # that contains an HttpError? 
1320 1321 for request_id in self._order: 1322 resp, content = self._responses[request_id] 1323 1324 request = self._requests[request_id] 1325 callback = self._callbacks[request_id] 1326 1327 response = None 1328 exception = None 1329 try: 1330 if resp.status >= 300: 1331 raise HttpError(resp, content, uri=request.uri) 1332 response = request.postproc(resp, content) 1333 except HttpError, e: 1334 exception = e 1335 1336 if callback is not None: 1337 callback(request_id, response, exception) 1338 if self._callback is not None: 1339 self._callback(request_id, response, exception)
1340
class HttpRequestMock(object):
  """Mock of HttpRequest.

  Do not construct directly, instead use RequestMockBuilder.
  """

  def __init__(self, resp, content, postproc):
    """Constructor for HttpRequestMock.

    Args:
      resp: httplib2.Response, the response to emulate coming from the
        request; a plain 200 OK response is used when None.
      content: string, the response body.
      postproc: callable, the post processing function usually supplied by
        the model class. See model.JsonModel.response() as an example.
    """
    # Default to a plain 200 OK response when none is supplied.
    self.resp = resp if resp is not None else httplib2.Response(
        {'status': 200, 'reason': 'OK'})
    self.content = content
    self.postproc = postproc
    # Mirror a 'reason' header onto the attribute httplib2 exposes.
    if 'reason' in self.resp:
      self.resp.reason = self.resp['reason']

  def execute(self, http=None):
    """Execute the request.

    Same behavior as HttpRequest.execute(), but the response is
    mocked and not really from an HTTP request/response.
    """
    return self.postproc(self.resp, self.content)
1372
class RequestMockBuilder(object):
  """A simple mock of HttpRequest.

  Pass the constructor a dictionary mapping request methodIds to tuples of
  (httplib2.Response, content, opt_expected_body) to be returned when that
  method is called. The httplib2.Response may be None, in which case a
  200 OK response is generated. If an opt_expected_body (str or dict) is
  provided, it is compared to the actual request body and
  UnexpectedBodyError is raised on inequality.

  Example:
    response = '{"data": {"id": "tag:google.c...'
    requestBuilder = RequestMockBuilder(
      {
        'plus.activities.get': (None, response),
      }
    )
    googleapiclient.discovery.build("plus", "v1", requestBuilder=requestBuilder)

  Methods without a supplied response return a 200 OK with an empty string
  as the response content, or raise an exception if check_unexpected is set
  to True. The methodId is taken from the rpcName in the discovery document.

  For more details see the project wiki.
  """

  def __init__(self, responses, check_unexpected=False):
    """Constructor for RequestMockBuilder.

    The constructed object should be a callable object
    that can replace the class HttpResponse.

    Args:
      responses: dict, maps methodIds to (httplib2.Response, content)
        tuples. The methodId comes from the 'rpcName' field in the
        discovery document.
      check_unexpected: bool, whether UnexpectedMethodError should be
        raised for methods with no supplied response.
    """
    self.responses = responses
    self.check_unexpected = check_unexpected

  def __call__(self, http, postproc, uri, method='GET', body=None,
               headers=None, methodId=None, resumable=None):
    """Implements the callable interface that discovery.build() expects
    of requestBuilder, which is to build an object compatible with
    HttpRequest.execute(). See that method for the description of the
    parameters and the expected response.
    """
    if methodId not in self.responses:
      if self.check_unexpected:
        # Strict mode: an unconfigured method is an error.
        raise UnexpectedMethodError(methodId=methodId)
      # Default: empty 200 OK JSON response.
      model = JsonModel(False)
      return HttpRequestMock(None, '{}', model.response)

    response = self.responses[methodId]
    resp, content = response[:2]
    if len(response) > 2:
      # A third element is the expected request body to verify against.
      expected_body = response[2]
      if bool(expected_body) != bool(body):
        # Not expecting a body and provided one
        # or expecting a body and not provided one.
        raise UnexpectedBodyError(expected_body, body)
      if isinstance(expected_body, str):
        # Compare as parsed JSON so formatting differences don't matter.
        expected_body = json.loads(expected_body)
        body = json.loads(body)
      if body != expected_body:
        raise UnexpectedBodyError(expected_body, body)
    return HttpRequestMock(resp, content, postproc)
1445
class HttpMock(object):
  """Mock of httplib2.Http"""

  def __init__(self, filename=None, headers=None):
    """
    Args:
      filename: string, absolute filename to read response from
      headers: dict, header to return with response
    """
    if headers is None:
      headers = {'status': '200 OK'}
    if filename:
      # open() + with guarantees the file handle is closed (the original
      # used the deprecated file() builtin and leaked the handle on error).
      with open(filename, 'r') as f:
        self.data = f.read()
    else:
      self.data = None
    self.response_headers = headers
    # Attributes capturing the most recent request() call.
    # (The original assigned self.headers twice; once is enough.)
    self.headers = None
    self.uri = None
    self.method = None
    self.body = None

  def request(self, uri,
              method='GET',
              body=None,
              headers=None,
              redirections=1,
              connection_type=None):
    """Record the request arguments and return the canned response.

    Returns:
      (httplib2.Response built from the constructor headers, the file data).
    """
    self.uri = uri
    self.method = method
    self.body = body
    self.headers = headers
    return httplib2.Response(self.response_headers), self.data
1483
class HttpMockSequence(object):
  """Mock of httplib2.Http

  Mocks a sequence of calls to request(), returning a different canned
  response for each call. Create an instance initialized with the desired
  response headers and content, then use it as if it were an httplib2.Http
  instance:

    http = HttpMockSequence([
      ({'status': '401'}, ''),
      ({'status': '200'}, '{"access_token":"1/3w","expires_in":3600}'),
      ({'status': '200'}, 'echo_request_headers'),
      ])
    resp, content = http.request("http://examples.com")

  There are special values you can pass in for content to trigger
  behaviours that are helpful in testing:

    'echo_request_headers' means return the request headers in the response
      body
    'echo_request_headers_as_json' means return the request headers in
      the response body
    'echo_request_body' means return the request body in the response body
    'echo_request_uri' means return the request uri in the response body
  """

  def __init__(self, iterable):
    """
    Args:
      iterable: iterable, a sequence of pairs of (headers, body)
    """
    self._iterable = iterable
    self.follow_redirects = True

  def request(self, uri,
              method='GET',
              body=None,
              headers=None,
              redirections=1,
              connection_type=None):
    # Consume the next canned (headers, content) pair.
    resp, content = self._iterable.pop(0)
    # Sentinel content values echo parts of the request back to the caller.
    if content == 'echo_request_headers':
      content = headers
    elif content == 'echo_request_headers_as_json':
      content = json.dumps(headers)
    elif content == 'echo_request_body':
      content = body.read() if hasattr(body, 'read') else body
    elif content == 'echo_request_uri':
      content = uri
    return httplib2.Response(resp), content
1536
def set_user_agent(http, user_agent):
  """Set the user-agent on every request.

  Args:
    http: an instance of httplib2.Http, or something that acts like it.
    user_agent: string, the value for the user-agent header.

  Returns:
    A modified instance of http that was passed in.

  Example:

    h = httplib2.Http()
    h = set_user_agent(h, "my-app-name/6.0")

  Most of the time the user-agent will be set doing auth; this is for the
  rare cases where you are accessing an unauthenticated endpoint.
  """
  wrapped_request = http.request

  # Closure that replaces http.request, injecting the user-agent header
  # before delegating to the original request method.
  def new_request(uri, method='GET', body=None, headers=None,
                  redirections=httplib2.DEFAULT_MAX_REDIRECTS,
                  connection_type=None):
    """Modify the request headers to add the user-agent."""
    if headers is None:
      headers = {}
    if 'user-agent' in headers:
      # Prepend ours to any caller-supplied user-agent.
      headers['user-agent'] = '%s %s' % (user_agent, headers['user-agent'])
    else:
      headers['user-agent'] = user_agent
    return wrapped_request(uri, method, body, headers,
                           redirections, connection_type)

  http.request = new_request
  return http
def tunnel_patch(http):
  """Tunnel PATCH requests over POST.

  Args:
    http: an instance of httplib2.Http, or something that acts like it.

  Returns:
    A modified instance of http that was passed in.

  Example:

    h = httplib2.Http()
    h = tunnel_patch(h)

  Useful if you are running on a platform that doesn't support PATCH.
  Apply this last if you are using OAuth 1.0, as changing the method
  will result in a different signature.
  """
  wrapped_request = http.request

  # Closure that replaces http.request, rewriting PATCH into POST with an
  # X-HTTP-Method-Override header before delegating.
  def new_request(uri, method='GET', body=None, headers=None,
                  redirections=httplib2.DEFAULT_MAX_REDIRECTS,
                  connection_type=None):
    """Rewrite a PATCH request into an overridden POST."""
    if headers is None:
      headers = {}
    if method == 'PATCH':
      if 'oauth_token' in headers.get('authorization', ''):
        # An OAuth 1.0 signature covers the method, so rewriting it here
        # invalidates the signature.
        logging.warning(
            'OAuth 1.0 request made with Credentials after tunnel_patch.')
      headers['x-http-method-override'] = "PATCH"
      method = 'POST'
    return wrapped_request(uri, method, body, headers,
                           redirections, connection_type)

  http.request = new_request
  return http