Package apiclient :: Module http

Source Code for Module apiclient.http

   1  # Copyright (C) 2012 Google Inc. 
   2  # 
   3  # Licensed under the Apache License, Version 2.0 (the "License"); 
   4  # you may not use this file except in compliance with the License. 
   5  # You may obtain a copy of the License at 
   6  # 
   7  #      http://www.apache.org/licenses/LICENSE-2.0 
   8  # 
   9  # Unless required by applicable law or agreed to in writing, software 
  10  # distributed under the License is distributed on an "AS IS" BASIS, 
  11  # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
  12  # See the License for the specific language governing permissions and 
  13  # limitations under the License. 
  14   
  15  """Classes to encapsulate a single HTTP request. 
  16   
  17  The classes implement a command pattern, with every 
  18  object supporting an execute() method that does the 
   19  actual HTTP request. 
  20  """ 
  21   
  22  __author__ = 'jcgregorio@google.com (Joe Gregorio)' 
  23   
  24  import StringIO 
  25  import base64 
  26  import copy 
  27  import gzip 
   28  import httplib2 
       import logging 
  29  import mimeparse 
  30  import mimetypes 
  31  import os 
  32  import sys 
  33  import urllib 
  34  import urlparse 
  35  import uuid 
  36   
  37  from email.generator import Generator 
  38  from email.mime.multipart import MIMEMultipart 
  39  from email.mime.nonmultipart import MIMENonMultipart 
  40  from email.parser import FeedParser 
  41  from errors import BatchError 
  42  from errors import HttpError 
  43  from errors import InvalidChunkSizeError 
  44  from errors import ResumableUploadError 
  45  from errors import UnexpectedBodyError 
  46  from errors import UnexpectedMethodError 
  47  from model import JsonModel 
  48  from oauth2client import util 
  49  from oauth2client.anyjson import simplejson 
  50   
  51   
  52  DEFAULT_CHUNK_SIZE = 512*1024 
  53   
  54  MAX_URI_LENGTH = 4000 
55 56 57 -class MediaUploadProgress(object):
58 """Status of a resumable upload.""" 59
60 - def __init__(self, resumable_progress, total_size):
61 """Constructor. 62 63 Args: 64 resumable_progress: int, bytes sent so far. 65 total_size: int, total bytes in complete upload, or None if the total 66 upload size isn't known ahead of time. 67 """ 68 self.resumable_progress = resumable_progress 69 self.total_size = total_size
70
71 - def progress(self):
72 """Percent of upload completed, as a float. 73 74 Returns: 75 the percentage complete as a float, returning 0.0 if the total size of 76 the upload is unknown. 77 """ 78 if self.total_size is not None: 79 return float(self.resumable_progress) / float(self.total_size) 80 else: 81 return 0.0
82
83 84 -class MediaDownloadProgress(object):
85 """Status of a resumable download.""" 86
87 - def __init__(self, resumable_progress, total_size):
88 """Constructor. 89 90 Args: 91 resumable_progress: int, bytes received so far. 92 total_size: int, total bytes in complete download. 93 """ 94 self.resumable_progress = resumable_progress 95 self.total_size = total_size
96
97 - def progress(self):
98 """Percent of download completed, as a float. 99 100 Returns: 101 the percentage complete as a float, returning 0.0 if the total size of 102 the download is unknown. 103 """ 104 if self.total_size is not None: 105 return float(self.resumable_progress) / float(self.total_size) 106 else: 107 return 0.0
108
109 110 -class MediaUpload(object):
111 """Describes a media object to upload. 112 113 Base class that defines the interface of MediaUpload subclasses. 114 115 Note that subclasses of MediaUpload may allow you to control the chunksize 116 when uploading a media object. It is important to keep the size of the chunk 117 as large as possible to keep the upload efficient. Other factors may influence 118 the size of the chunk you use, particularly if you are working in an 119 environment where individual HTTP requests may have a hardcoded time limit, 120 such as under certain classes of requests under Google App Engine. 121 122 Streams are io.Base compatible objects that support seek(). Some MediaUpload 123 subclasses support using streams directly to upload data. Support for 124 streaming may be indicated by a MediaUpload sub-class and if appropriate for a 125 platform that stream will be used for uploading the media object. The support 126 for streaming is indicated by has_stream() returning True. The stream() method 127 should return an io.Base object that supports seek(). On platforms where the 128 underlying httplib module supports streaming, for example Python 2.6 and 129 later, the stream will be passed into the http library which will result in 130 less memory being used and possibly faster uploads. 131 132 If you need to upload media that can't be uploaded using any of the existing 133 MediaUpload sub-class then you can sub-class MediaUpload for your particular 134 needs. 135 """ 136
137 - def chunksize(self):
138 """Chunk size for resumable uploads. 139 140 Returns: 141 Chunk size in bytes. 142 """ 143 raise NotImplementedError()
144
145 - def mimetype(self):
146 """Mime type of the body. 147 148 Returns: 149 Mime type. 150 """ 151 return 'application/octet-stream'
152
153 - def size(self):
154 """Size of upload. 155 156 Returns: 157 Size of the body, or None of the size is unknown. 158 """ 159 return None
160
161 - def resumable(self):
162 """Whether this upload is resumable. 163 164 Returns: 165 True if resumable upload or False. 166 """ 167 return False
168
169 - def getbytes(self, begin, length):
170 """Get bytes from the media. 171 172 Args: 173 begin: int, offset from beginning of file. 174 length: int, number of bytes to read, starting at begin. 175 176 Returns: 177 A string of bytes read. May be shorter than length if EOF was reached 178 first. 179 """ 180 raise NotImplementedError()
181
182 - def has_stream(self):
183 """Does the underlying upload support a streaming interface. 184 185 Streaming means it is an io.IOBase subclass that supports seek, i.e. 186 seekable() returns True. 187 188 Returns: 189 True if the call to stream() will return an instance of a seekable io.Base 190 subclass. 191 """ 192 return False
193
194 - def stream(self):
195 """A stream interface to the data being uploaded. 196 197 Returns: 198 The returned value is an io.IOBase subclass that supports seek, i.e. 199 seekable() returns True. 200 """ 201 raise NotImplementedError()
202 203 @util.positional(1)
204 - def _to_json(self, strip=None):
205 """Utility function for creating a JSON representation of a MediaUpload. 206 207 Args: 208 strip: array, An array of names of members to not include in the JSON. 209 210 Returns: 211 string, a JSON representation of this instance, suitable to pass to 212 from_json(). 213 """ 214 t = type(self) 215 d = copy.copy(self.__dict__) 216 if strip is not None: 217 for member in strip: 218 del d[member] 219 d['_class'] = t.__name__ 220 d['_module'] = t.__module__ 221 return simplejson.dumps(d)
222
223 - def to_json(self):
224 """Create a JSON representation of an instance of MediaUpload. 225 226 Returns: 227 string, a JSON representation of this instance, suitable to pass to 228 from_json(). 229 """ 230 return self._to_json()
231 232 @classmethod
233 - def new_from_json(cls, s):
234 """Utility class method to instantiate a MediaUpload subclass from a JSON 235 representation produced by to_json(). 236 237 Args: 238 s: string, JSON from to_json(). 239 240 Returns: 241 An instance of the subclass of MediaUpload that was serialized with 242 to_json(). 243 """ 244 data = simplejson.loads(s) 245 # Find and call the right classmethod from_json() to restore the object. 246 module = data['_module'] 247 m = __import__(module, fromlist=module.split('.')[:-1]) 248 kls = getattr(m, data['_class']) 249 from_json = getattr(kls, 'from_json') 250 return from_json(s)
251
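If none of the MediaUpload subclasses below fits, the interface above can be implemented directly. A minimal, hypothetical sketch of a subclass that uploads a byte string held in memory (not part of this module):

  class ByteStringUpload(MediaUpload):
    """Hypothetical example: a MediaUpload over a byte string."""

    def __init__(self, body, mimetype='application/octet-stream',
                 chunksize=DEFAULT_CHUNK_SIZE, resumable=False):
      self._body = body
      self._mimetype = mimetype
      self._chunksize = chunksize
      self._resumable = resumable

    def chunksize(self):
      return self._chunksize

    def mimetype(self):
      return self._mimetype

    def size(self):
      return len(self._body)

    def resumable(self):
      return self._resumable

    def getbytes(self, begin, length):
      # May return fewer than 'length' bytes if the end of the data is reached.
      return self._body[begin:begin + length]

has_stream() is inherited and returns False, so the uploader falls back to getbytes() for each chunk.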
252 253 -class MediaIoBaseUpload(MediaUpload):
254 """A MediaUpload for a io.Base objects. 255 256 Note that the Python file object is compatible with io.Base and can be used 257 with this class also. 258 259 fh = io.BytesIO('...Some data to upload...') 260 media = MediaIoBaseUpload(fh, mimetype='image/png', 261 chunksize=1024*1024, resumable=True) 262 farm.animals().insert( 263 id='cow', 264 name='cow.png', 265 media_body=media).execute() 266 267 Depending on the platform you are working on, you may pass -1 as the 268 chunksize, which indicates that the entire file should be uploaded in a single 269 request. If the underlying platform supports streams, such as Python 2.6 or 270 later, then this can be very efficient as it avoids multiple connections, and 271 also avoids loading the entire file into memory before sending it. Note that 272 Google App Engine has a 5MB limit on request size, so you should never set 273 your chunksize larger than 5MB, or to -1. 274 """ 275 276 @util.positional(3)
277 - def __init__(self, fd, mimetype, chunksize=DEFAULT_CHUNK_SIZE, 278 resumable=False):
279 """Constructor. 280 281 Args: 282 fd: io.Base or file object, The source of the bytes to upload. MUST be 283 opened in blocking mode, do not use streams opened in non-blocking mode. 284 The given stream must be seekable, that is, it must be able to call 285 seek() on fd. 286 mimetype: string, Mime-type of the file. 287 chunksize: int, File will be uploaded in chunks of this many bytes. Only 288 used if resumable=True. Pass in a value of -1 if the file is to be 289 uploaded as a single chunk. Note that Google App Engine has a 5MB limit 290 on request size, so you should never set your chunksize larger than 5MB, 291 or to -1. 292 resumable: bool, True if this is a resumable upload. False means upload 293 in a single request. 294 """ 295 super(MediaIoBaseUpload, self).__init__() 296 self._fd = fd 297 self._mimetype = mimetype 298 if not (chunksize == -1 or chunksize > 0): 299 raise InvalidChunkSizeError() 300 self._chunksize = chunksize 301 self._resumable = resumable 302 303 self._fd.seek(0, os.SEEK_END) 304 self._size = self._fd.tell()
305
306 - def chunksize(self):
307 """Chunk size for resumable uploads. 308 309 Returns: 310 Chunk size in bytes. 311 """ 312 return self._chunksize
313
314 - def mimetype(self):
315 """Mime type of the body. 316 317 Returns: 318 Mime type. 319 """ 320 return self._mimetype
321
322 - def size(self):
323 """Size of upload. 324 325 Returns: 326 Size of the body, or None of the size is unknown. 327 """ 328 return self._size
329
330 - def resumable(self):
331 """Whether this upload is resumable. 332 333 Returns: 334 True if resumable upload or False. 335 """ 336 return self._resumable
337
338 - def getbytes(self, begin, length):
339 """Get bytes from the media. 340 341 Args: 342 begin: int, offset from beginning of file. 343 length: int, number of bytes to read, starting at begin. 344 345 Returns: 346 A string of bytes read. May be shorted than length if EOF was reached 347 first. 348 """ 349 self._fd.seek(begin) 350 return self._fd.read(length)
351
352 - def has_stream(self):
353 """Does the underlying upload support a streaming interface. 354 355 Streaming means it is an io.IOBase subclass that supports seek, i.e. 356 seekable() returns True. 357 358 Returns: 359 True if the call to stream() will return an instance of a seekable io.Base 360 subclass. 361 """ 362 return True
363
364 - def stream(self):
365 """A stream interface to the data being uploaded. 366 367 Returns: 368 The returned value is an io.IOBase subclass that supports seek, i.e. 369 seekable() returns True. 370 """ 371 return self._fd
372
373 - def to_json(self):
374 """This upload type is not serializable.""" 375 raise NotImplementedError('MediaIoBaseUpload is not serializable.')
376
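As the docstring notes, an ordinary Python file object opened in binary mode works as the stream, and a chunksize of -1 sends the whole body in a single request. A short sketch, reusing the hypothetical 'farm' service from the example above:

  fh = open('cow.png', 'rb')
  media = MediaIoBaseUpload(fh, mimetype='image/png', chunksize=-1,
                            resumable=True)
  response = farm.animals().insert(
      id='cow',
      name='cow.png',
      media_body=media).execute()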
377 378 -class MediaFileUpload(MediaIoBaseUpload):
379 """A MediaUpload for a file. 380 381 Construct a MediaFileUpload and pass as the media_body parameter of the 382 method. For example, if we had a service that allowed uploading images: 383 384 385 media = MediaFileUpload('cow.png', mimetype='image/png', 386 chunksize=1024*1024, resumable=True) 387 farm.animals().insert( 388 id='cow', 389 name='cow.png', 390 media_body=media).execute() 391 392 Depending on the platform you are working on, you may pass -1 as the 393 chunksize, which indicates that the entire file should be uploaded in a single 394 request. If the underlying platform supports streams, such as Python 2.6 or 395 later, then this can be very efficient as it avoids multiple connections, and 396 also avoids loading the entire file into memory before sending it. Note that 397 Google App Engine has a 5MB limit on request size, so you should never set 398 your chunksize larger than 5MB, or to -1. 399 """ 400 401 @util.positional(2)
402 - def __init__(self, filename, mimetype=None, chunksize=DEFAULT_CHUNK_SIZE, 403 resumable=False):
404 """Constructor. 405 406 Args: 407 filename: string, Name of the file. 408 mimetype: string, Mime-type of the file. If None then a mime-type will be 409 guessed from the file extension. 410 chunksize: int, File will be uploaded in chunks of this many bytes. Only 411 used if resumable=True. Pass in a value of -1 if the file is to be 412 uploaded in a single chunk. Note that Google App Engine has a 5MB limit 413 on request size, so you should never set your chunksize larger than 5MB, 414 or to -1. 415 resumable: bool, True if this is a resumable upload. False means upload 416 in a single request. 417 """ 418 self._filename = filename 419 fd = open(self._filename, 'rb') 420 if mimetype is None: 421 (mimetype, encoding) = mimetypes.guess_type(filename) 422 super(MediaFileUpload, self).__init__(fd, mimetype, chunksize=chunksize, 423 resumable=resumable)
424
425 - def to_json(self):
426 """Creating a JSON representation of an instance of MediaFileUpload. 427 428 Returns: 429 string, a JSON representation of this instance, suitable to pass to 430 from_json(). 431 """ 432 return self._to_json(strip=['_fd'])
433 434 @staticmethod
435 - def from_json(s):
436 d = simplejson.loads(s) 437 return MediaFileUpload(d['_filename'], mimetype=d['_mimetype'], 438 chunksize=d['_chunksize'], resumable=d['_resumable'])
439
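Because to_json() strips the open file handle and from_json() reopens the named file, a MediaFileUpload can be persisted and rebuilt later. A sketch with a hypothetical file name:

  media = MediaFileUpload('cow.png', mimetype='image/png',
                          chunksize=1024*1024, resumable=True)
  saved = media.to_json()            # JSON string, safe to write to disk

  # Later, possibly in another process; reopens 'cow.png'.
  restored = MediaFileUpload.from_json(saved)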
440 441 -class MediaInMemoryUpload(MediaIoBaseUpload):
442 """MediaUpload for a chunk of bytes. 443 444 DEPRECATED: Use MediaIoBaseUpload with either io.TextIOBase or StringIO for 445 the stream. 446 """ 447 448 @util.positional(2)
449 - def __init__(self, body, mimetype='application/octet-stream', 450 chunksize=DEFAULT_CHUNK_SIZE, resumable=False):
451 """Create a new MediaInMemoryUpload. 452 453 DEPRECATED: Use MediaIoBaseUpload with either io.TextIOBase or StringIO for 454 the stream. 455 456 Args: 457 body: string, Bytes of body content. 458 mimetype: string, Mime-type of the file or default of 459 'application/octet-stream'. 460 chunksize: int, File will be uploaded in chunks of this many bytes. Only 461 used if resumable=True. 462 resumable: bool, True if this is a resumable upload. False means upload 463 in a single request. 464 """ 465 fd = StringIO.StringIO(body) 466 super(MediaInMemoryUpload, self).__init__(fd, mimetype, chunksize=chunksize, 467 resumable=resumable)
468
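A sketch of the replacement recommended above, building the same in-memory upload with MediaIoBaseUpload and a StringIO stream:

  import StringIO

  body = 'Some bytes to upload.'
  fh = StringIO.StringIO(body)
  media = MediaIoBaseUpload(fh, mimetype='text/plain',
                            chunksize=DEFAULT_CHUNK_SIZE, resumable=False)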
469 470 -class MediaIoBaseDownload(object):
471 """"Download media resources. 472 473 Note that the Python file object is compatible with io.Base and can be used 474 with this class also. 475 476 477 Example: 478 request = farms.animals().get_media(id='cow') 479 fh = io.FileIO('cow.png', mode='wb') 480 downloader = MediaIoBaseDownload(fh, request, chunksize=1024*1024) 481 482 done = False 483 while done is False: 484 status, done = downloader.next_chunk() 485 if status: 486 print "Download %d%%." % int(status.progress() * 100) 487 print "Download Complete!" 488 """ 489 490 @util.positional(3)
491 - def __init__(self, fd, request, chunksize=DEFAULT_CHUNK_SIZE):
492 """Constructor. 493 494 Args: 495 fd: io.Base or file object, The stream in which to write the downloaded 496 bytes. 497 request: apiclient.http.HttpRequest, the media request to perform in 498 chunks. 499 chunksize: int, File will be downloaded in chunks of this many bytes. 500 """ 501 self._fd = fd 502 self._request = request 503 self._uri = request.uri 504 self._chunksize = chunksize 505 self._progress = 0 506 self._total_size = None 507 self._done = False
508
509 - def next_chunk(self):
510 """Get the next chunk of the download. 511 512 Returns: 513 (status, done): (MediaDownloadStatus, boolean) 514 The value of 'done' will be True when the media has been fully 515 downloaded. 516 517 Raises: 518 apiclient.errors.HttpError if the response was not a 2xx. 519 httplib2.HttpLib2Error if a transport error has occured. 520 """ 521 headers = { 522 'range': 'bytes=%d-%d' % ( 523 self._progress, self._progress + self._chunksize) 524 } 525 http = self._request.http 526 http.follow_redirects = False 527 528 resp, content = http.request(self._uri, headers=headers) 529 if resp.status in [301, 302, 303, 307, 308] and 'location' in resp: 530 self._uri = resp['location'] 531 resp, content = http.request(self._uri, headers=headers) 532 if resp.status in [200, 206]: 533 self._progress += len(content) 534 self._fd.write(content) 535 536 if 'content-range' in resp: 537 content_range = resp['content-range'] 538 length = content_range.rsplit('/', 1)[1] 539 self._total_size = int(length) 540 541 if self._progress == self._total_size: 542 self._done = True 543 return MediaDownloadProgress(self._progress, self._total_size), self._done 544 else: 545 raise HttpError(resp, content, uri=self._uri)
546
547 548 -class _StreamSlice(object):
549 """Truncated stream. 550 551 Takes a stream and presents a stream that is a slice of the original stream. 552 This is used when uploading media in chunks. In later versions of Python a 553 stream can be passed to httplib in place of the string of data to send. The 554 problem is that httplib just blindly reads to the end of the stream. This 555 wrapper presents a virtual stream that only reads to the end of the chunk. 556 """ 557
558 - def __init__(self, stream, begin, chunksize):
559 """Constructor. 560 561 Args: 562 stream: (io.Base, file object), the stream to wrap. 563 begin: int, the seek position the chunk begins at. 564 chunksize: int, the size of the chunk. 565 """ 566 self._stream = stream 567 self._begin = begin 568 self._chunksize = chunksize 569 self._stream.seek(begin)
570
571 - def read(self, n=-1):
572 """Read n bytes. 573 574 Args: 575 n, int, the number of bytes to read. 576 577 Returns: 578 A string of length 'n', or less if EOF is reached. 579 """ 580 # The data left available to read sits in [cur, end) 581 cur = self._stream.tell() 582 end = self._begin + self._chunksize 583 if n == -1 or cur + n > end: 584 n = end - cur 585 return self._stream.read(n)
586
587 588 -class HttpRequest(object):
589 """Encapsulates a single HTTP request.""" 590 591 @util.positional(4)
592 - def __init__(self, http, postproc, uri, 593 method='GET', 594 body=None, 595 headers=None, 596 methodId=None, 597 resumable=None):
598 """Constructor for an HttpRequest. 599 600 Args: 601 http: httplib2.Http, the transport object to use to make a request 602 postproc: callable, called on the HTTP response and content to transform 603 it into a data object before returning, or raising an exception 604 on an error. 605 uri: string, the absolute URI to send the request to 606 method: string, the HTTP method to use 607 body: string, the request body of the HTTP request, 608 headers: dict, the HTTP request headers 609 methodId: string, a unique identifier for the API method being called. 610 resumable: MediaUpload, None if this is not a resumbale request. 611 """ 612 self.uri = uri 613 self.method = method 614 self.body = body 615 self.headers = headers or {} 616 self.methodId = methodId 617 self.http = http 618 self.postproc = postproc 619 self.resumable = resumable 620 self._in_error_state = False 621 622 # Pull the multipart boundary out of the content-type header. 623 major, minor, params = mimeparse.parse_mime_type( 624 headers.get('content-type', 'application/json')) 625 626 # The size of the non-media part of the request. 627 self.body_size = len(self.body or '') 628 629 # The resumable URI to send chunks to. 630 self.resumable_uri = None 631 632 # The bytes that have been uploaded. 633 self.resumable_progress = 0
634 635 @util.positional(1)
636 - def execute(self, http=None):
637 """Execute the request. 638 639 Args: 640 http: httplib2.Http, an http object to be used in place of the 641 one the HttpRequest request object was constructed with. 642 643 Returns: 644 A deserialized object model of the response body as determined 645 by the postproc. 646 647 Raises: 648 apiclient.errors.HttpError if the response was not a 2xx. 649 httplib2.HttpLib2Error if a transport error has occured. 650 """ 651 if http is None: 652 http = self.http 653 if self.resumable: 654 body = None 655 while body is None: 656 _, body = self.next_chunk(http=http) 657 return body 658 else: 659 if 'content-length' not in self.headers: 660 self.headers['content-length'] = str(self.body_size) 661 # If the request URI is too long then turn it into a POST request. 662 if len(self.uri) > MAX_URI_LENGTH and self.method == 'GET': 663 self.method = 'POST' 664 self.headers['x-http-method-override'] = 'GET' 665 self.headers['content-type'] = 'application/x-www-form-urlencoded' 666 parsed = urlparse.urlparse(self.uri) 667 self.uri = urlparse.urlunparse( 668 (parsed.scheme, parsed.netloc, parsed.path, parsed.params, None, 669 None) 670 ) 671 self.body = parsed.query 672 self.headers['content-length'] = str(len(self.body)) 673 674 resp, content = http.request(self.uri, method=self.method, 675 body=self.body, 676 headers=self.headers) 677 if resp.status >= 300: 678 raise HttpError(resp, content, uri=self.uri) 679 return self.postproc(resp, content)
680 681 @util.positional(1)
682 - def next_chunk(self, http=None):
683 """Execute the next step of a resumable upload. 684 685 Can only be used if the method being executed supports media uploads and 686 the MediaUpload object passed in was flagged as using resumable upload. 687 688 Example: 689 690 media = MediaFileUpload('cow.png', mimetype='image/png', 691 chunksize=1000, resumable=True) 692 request = farm.animals().insert( 693 id='cow', 694 name='cow.png', 695 media_body=media) 696 697 response = None 698 while response is None: 699 status, response = request.next_chunk() 700 if status: 701 print "Upload %d%% complete." % int(status.progress() * 100) 702 703 704 Returns: 705 (status, body): (ResumableMediaStatus, object) 706 The body will be None until the resumable media is fully uploaded. 707 708 Raises: 709 apiclient.errors.HttpError if the response was not a 2xx. 710 httplib2.HttpLib2Error if a transport error has occured. 711 """ 712 if http is None: 713 http = self.http 714 715 if self.resumable.size() is None: 716 size = '*' 717 else: 718 size = str(self.resumable.size()) 719 720 if self.resumable_uri is None: 721 start_headers = copy.copy(self.headers) 722 start_headers['X-Upload-Content-Type'] = self.resumable.mimetype() 723 if size != '*': 724 start_headers['X-Upload-Content-Length'] = size 725 start_headers['content-length'] = str(self.body_size) 726 727 resp, content = http.request(self.uri, self.method, 728 body=self.body, 729 headers=start_headers) 730 if resp.status == 200 and 'location' in resp: 731 self.resumable_uri = resp['location'] 732 else: 733 raise ResumableUploadError("Failed to retrieve starting URI.") 734 elif self._in_error_state: 735 # If we are in an error state then query the server for current state of 736 # the upload by sending an empty PUT and reading the 'range' header in 737 # the response. 738 headers = { 739 'Content-Range': 'bytes */%s' % size, 740 'content-length': '0' 741 } 742 resp, content = http.request(self.resumable_uri, 'PUT', 743 headers=headers) 744 status, body = self._process_response(resp, content) 745 if body: 746 # The upload was complete. 747 return (status, body) 748 749 # The httplib.request method can take streams for the body parameter, but 750 # only in Python 2.6 or later. If a stream is available under those 751 # conditions then use it as the body argument. 752 if self.resumable.has_stream() and sys.version_info[1] >= 6: 753 data = self.resumable.stream() 754 if self.resumable.chunksize() == -1: 755 data.seek(self.resumable_progress) 756 chunk_end = self.resumable.size() - self.resumable_progress - 1 757 else: 758 # Doing chunking with a stream, so wrap a slice of the stream. 759 data = _StreamSlice(data, self.resumable_progress, 760 self.resumable.chunksize()) 761 chunk_end = min( 762 self.resumable_progress + self.resumable.chunksize() - 1, 763 self.resumable.size() - 1) 764 else: 765 data = self.resumable.getbytes( 766 self.resumable_progress, self.resumable.chunksize()) 767 768 # A short read implies that we are at EOF, so finish the upload. 769 if len(data) < self.resumable.chunksize(): 770 size = str(self.resumable_progress + len(data)) 771 772 chunk_end = self.resumable_progress + len(data) - 1 773 774 headers = { 775 'Content-Range': 'bytes %d-%d/%s' % ( 776 self.resumable_progress, chunk_end, size), 777 # Must set the content-length header here because httplib can't 778 # calculate the size when working with _StreamSlice. 
779 'Content-Length': str(chunk_end - self.resumable_progress + 1) 780 } 781 try: 782 resp, content = http.request(self.resumable_uri, 'PUT', 783 body=data, 784 headers=headers) 785 except: 786 self._in_error_state = True 787 raise 788 789 return self._process_response(resp, content)
790
791 - def _process_response(self, resp, content):
792 """Process the response from a single chunk upload. 793 794 Args: 795 resp: httplib2.Response, the response object. 796 content: string, the content of the response. 797 798 Returns: 799 (status, body): (ResumableMediaStatus, object) 800 The body will be None until the resumable media is fully uploaded. 801 802 Raises: 803 apiclient.errors.HttpError if the response was not a 2xx or a 308. 804 """ 805 if resp.status in [200, 201]: 806 self._in_error_state = False 807 return None, self.postproc(resp, content) 808 elif resp.status == 308: 809 self._in_error_state = False 810 # A "308 Resume Incomplete" indicates we are not done. 811 self.resumable_progress = int(resp['range'].split('-')[1]) + 1 812 if 'location' in resp: 813 self.resumable_uri = resp['location'] 814 else: 815 self._in_error_state = True 816 raise HttpError(resp, content, uri=self.uri) 817 818 return (MediaUploadProgress(self.resumable_progress, self.resumable.size()), 819 None)
820
821 - def to_json(self):
822 """Returns a JSON representation of the HttpRequest.""" 823 d = copy.copy(self.__dict__) 824 if d['resumable'] is not None: 825 d['resumable'] = self.resumable.to_json() 826 del d['http'] 827 del d['postproc'] 828 829 return simplejson.dumps(d)
830 831 @staticmethod
832 - def from_json(s, http, postproc):
833 """Returns an HttpRequest populated with info from a JSON object.""" 834 d = simplejson.loads(s) 835 if d['resumable'] is not None: 836 d['resumable'] = MediaUpload.new_from_json(d['resumable']) 837 return HttpRequest( 838 http, 839 postproc, 840 uri=d['uri'], 841 method=d['method'], 842 body=d['body'], 843 headers=d['headers'], 844 methodId=d['methodId'], 845 resumable=d['resumable'])
846
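to_json() and from_json() let a not-yet-executed request be stored and rebuilt later; the transport and postproc are not serialized, so both must be supplied again. A sketch, using this module's JsonModel as the postproc and the hypothetical 'farm' service from the examples above:

  request = farm.animals().list()
  saved = request.to_json()          # JSON string, safe to persist

  # Later, possibly in another process.
  http = httplib2.Http()
  request = HttpRequest.from_json(saved, http, JsonModel(False).response)
  result = request.execute()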
847 848 -class BatchHttpRequest(object):
849 """Batches multiple HttpRequest objects into a single HTTP request. 850 851 Example: 852 from apiclient.http import BatchHttpRequest 853 854 def list_animals(request_id, response, exception): 855 \"\"\"Do something with the animals list response.\"\"\" 856 if exception is not None: 857 # Do something with the exception. 858 pass 859 else: 860 # Do something with the response. 861 pass 862 863 def list_farmers(request_id, response, exception): 864 \"\"\"Do something with the farmers list response.\"\"\" 865 if exception is not None: 866 # Do something with the exception. 867 pass 868 else: 869 # Do something with the response. 870 pass 871 872 service = build('farm', 'v2') 873 874 batch = BatchHttpRequest() 875 876 batch.add(service.animals().list(), list_animals) 877 batch.add(service.farmers().list(), list_farmers) 878 batch.execute(http=http) 879 """ 880 881 @util.positional(1)
882 - def __init__(self, callback=None, batch_uri=None):
883 """Constructor for a BatchHttpRequest. 884 885 Args: 886 callback: callable, A callback to be called for each response, of the 887 form callback(id, response, exception). The first parameter is the 888 request id, and the second is the deserialized response object. The 889 third is an apiclient.errors.HttpError exception object if an HTTP error 890 occurred while processing the request, or None if no error occurred. 891 batch_uri: string, URI to send batch requests to. 892 """ 893 if batch_uri is None: 894 batch_uri = 'https://www.googleapis.com/batch' 895 self._batch_uri = batch_uri 896 897 # Global callback to be called for each individual response in the batch. 898 self._callback = callback 899 900 # A map from id to request. 901 self._requests = {} 902 903 # A map from id to callback. 904 self._callbacks = {} 905 906 # List of request ids, in the order in which they were added. 907 self._order = [] 908 909 # The last auto generated id. 910 self._last_auto_id = 0 911 912 # Unique ID on which to base the Content-ID headers. 913 self._base_id = None 914 915 # A map from request id to (httplib2.Response, content) response pairs 916 self._responses = {} 917 918 # A map of id(Credentials) that have been refreshed. 919 self._refreshed_credentials = {}
920
921 - def _refresh_and_apply_credentials(self, request, http):
922 """Refresh the credentials and apply to the request. 923 924 Args: 925 request: HttpRequest, the request. 926 http: httplib2.Http, the global http object for the batch. 927 """ 928 # For the credentials to refresh, but only once per refresh_token 929 # If there is no http per the request then refresh the http passed in 930 # via execute() 931 creds = None 932 if request.http is not None and hasattr(request.http.request, 933 'credentials'): 934 creds = request.http.request.credentials 935 elif http is not None and hasattr(http.request, 'credentials'): 936 creds = http.request.credentials 937 if creds is not None: 938 if id(creds) not in self._refreshed_credentials: 939 creds.refresh(http) 940 self._refreshed_credentials[id(creds)] = 1 941 942 # Only apply the credentials if we are using the http object passed in, 943 # otherwise apply() will get called during _serialize_request(). 944 if request.http is None or not hasattr(request.http.request, 945 'credentials'): 946 creds.apply(request.headers)
947
948 - def _id_to_header(self, id_):
949 """Convert an id to a Content-ID header value. 950 951 Args: 952 id_: string, identifier of individual request. 953 954 Returns: 955 A Content-ID header with the id_ encoded into it. A UUID is prepended to 956 the value because Content-ID headers are supposed to be universally 957 unique. 958 """ 959 if self._base_id is None: 960 self._base_id = uuid.uuid4() 961 962 return '<%s+%s>' % (self._base_id, urllib.quote(id_))
963
964 - def _header_to_id(self, header):
965 """Convert a Content-ID header value to an id. 966 967 Presumes the Content-ID header conforms to the format that _id_to_header() 968 returns. 969 970 Args: 971 header: string, Content-ID header value. 972 973 Returns: 974 The extracted id value. 975 976 Raises: 977 BatchError if the header is not in the expected format. 978 """ 979 if header[0] != '<' or header[-1] != '>': 980 raise BatchError("Invalid value for Content-ID: %s" % header) 981 if '+' not in header: 982 raise BatchError("Invalid value for Content-ID: %s" % header) 983 base, id_ = header[1:-1].rsplit('+', 1) 984 985 return urllib.unquote(id_)
986
987 - def _serialize_request(self, request):
988 """Convert an HttpRequest object into a string. 989 990 Args: 991 request: HttpRequest, the request to serialize. 992 993 Returns: 994 The request as a string in application/http format. 995 """ 996 # Construct status line 997 parsed = urlparse.urlparse(request.uri) 998 request_line = urlparse.urlunparse( 999 (None, None, parsed.path, parsed.params, parsed.query, None) 1000 ) 1001 status_line = request.method + ' ' + request_line + ' HTTP/1.1\n' 1002 major, minor = request.headers.get('content-type', 'application/json').split('/') 1003 msg = MIMENonMultipart(major, minor) 1004 headers = request.headers.copy() 1005 1006 if request.http is not None and hasattr(request.http.request, 1007 'credentials'): 1008 request.http.request.credentials.apply(headers) 1009 1010 # MIMENonMultipart adds its own Content-Type header. 1011 if 'content-type' in headers: 1012 del headers['content-type'] 1013 1014 for key, value in headers.iteritems(): 1015 msg[key] = value 1016 msg['Host'] = parsed.netloc 1017 msg.set_unixfrom(None) 1018 1019 if request.body is not None: 1020 msg.set_payload(request.body) 1021 msg['content-length'] = str(len(request.body)) 1022 1023 # Serialize the mime message. 1024 fp = StringIO.StringIO() 1025 # maxheaderlen=0 means don't line wrap headers. 1026 g = Generator(fp, maxheaderlen=0) 1027 g.flatten(msg, unixfrom=False) 1028 body = fp.getvalue() 1029 1030 # Strip off the \n\n that the MIME lib tacks onto the end of the payload. 1031 if request.body is None: 1032 body = body[:-2] 1033 1034 return status_line.encode('utf-8') + body
1035
1036 - def _deserialize_response(self, payload):
1037 """Convert string into httplib2 response and content. 1038 1039 Args: 1040 payload: string, headers and body as a string. 1041 1042 Returns: 1043 A pair (resp, content), such as would be returned from httplib2.request. 1044 """ 1045 # Strip off the status line 1046 status_line, payload = payload.split('\n', 1) 1047 protocol, status, reason = status_line.split(' ', 2) 1048 1049 # Parse the rest of the response 1050 parser = FeedParser() 1051 parser.feed(payload) 1052 msg = parser.close() 1053 msg['status'] = status 1054 1055 # Create httplib2.Response from the parsed headers. 1056 resp = httplib2.Response(msg) 1057 resp.reason = reason 1058 resp.version = int(protocol.split('/', 1)[1].replace('.', '')) 1059 1060 content = payload.split('\r\n\r\n', 1)[1] 1061 1062 return resp, content
1063
1064 - def _new_id(self):
1065 """Create a new id. 1066 1067 Auto incrementing number that avoids conflicts with ids already used. 1068 1069 Returns: 1070 string, a new unique id. 1071 """ 1072 self._last_auto_id += 1 1073 while str(self._last_auto_id) in self._requests: 1074 self._last_auto_id += 1 1075 return str(self._last_auto_id)
1076 1077 @util.positional(2)
1078 - def add(self, request, callback=None, request_id=None):
1079 """Add a new request. 1080 1081 Every callback added will be paired with a unique id, the request_id. That 1082 unique id will be passed back to the callback when the response comes back 1083 from the server. The default behavior is to have the library generate it's 1084 own unique id. If the caller passes in a request_id then they must ensure 1085 uniqueness for each request_id, and if they are not an exception is 1086 raised. Callers should either supply all request_ids or nevery supply a 1087 request id, to avoid such an error. 1088 1089 Args: 1090 request: HttpRequest, Request to add to the batch. 1091 callback: callable, A callback to be called for this response, of the 1092 form callback(id, response, exception). The first parameter is the 1093 request id, and the second is the deserialized response object. The 1094 third is an apiclient.errors.HttpError exception object if an HTTP error 1095 occurred while processing the request, or None if no errors occurred. 1096 request_id: string, A unique id for the request. The id will be passed to 1097 the callback with the response. 1098 1099 Returns: 1100 None 1101 1102 Raises: 1103 BatchError if a media request is added to a batch. 1104 KeyError is the request_id is not unique. 1105 """ 1106 if request_id is None: 1107 request_id = self._new_id() 1108 if request.resumable is not None: 1109 raise BatchError("Media requests cannot be used in a batch request.") 1110 if request_id in self._requests: 1111 raise KeyError("A request with this ID already exists: %s" % request_id) 1112 self._requests[request_id] = request 1113 self._callbacks[request_id] = callback 1114 self._order.append(request_id)
1115
1116 - def _execute(self, http, order, requests):
1117 """Serialize batch request, send to server, process response. 1118 1119 Args: 1120 http: httplib2.Http, an http object to be used to make the request with. 1121 order: list, list of request ids in the order they were added to the 1122 batch. 1123 request: list, list of request objects to send. 1124 1125 Raises: 1126 httplib2.HttpLib2Error if a transport error has occured. 1127 apiclient.errors.BatchError if the response is the wrong format. 1128 """ 1129 message = MIMEMultipart('mixed') 1130 # Message should not write out it's own headers. 1131 setattr(message, '_write_headers', lambda self: None) 1132 1133 # Add all the individual requests. 1134 for request_id in order: 1135 request = requests[request_id] 1136 1137 msg = MIMENonMultipart('application', 'http') 1138 msg['Content-Transfer-Encoding'] = 'binary' 1139 msg['Content-ID'] = self._id_to_header(request_id) 1140 1141 body = self._serialize_request(request) 1142 msg.set_payload(body) 1143 message.attach(msg) 1144 1145 body = message.as_string() 1146 1147 headers = {} 1148 headers['content-type'] = ('multipart/mixed; ' 1149 'boundary="%s"') % message.get_boundary() 1150 1151 resp, content = http.request(self._batch_uri, 'POST', body=body, 1152 headers=headers) 1153 1154 if resp.status >= 300: 1155 raise HttpError(resp, content, uri=self._batch_uri) 1156 1157 # Now break out the individual responses and store each one. 1158 boundary, _ = content.split(None, 1) 1159 1160 # Prepend with a content-type header so FeedParser can handle it. 1161 header = 'content-type: %s\r\n\r\n' % resp['content-type'] 1162 for_parser = header + content 1163 1164 parser = FeedParser() 1165 parser.feed(for_parser) 1166 mime_response = parser.close() 1167 1168 if not mime_response.is_multipart(): 1169 raise BatchError("Response not in multipart/mixed format.", resp=resp, 1170 content=content) 1171 1172 for part in mime_response.get_payload(): 1173 request_id = self._header_to_id(part['Content-ID']) 1174 response, content = self._deserialize_response(part.get_payload()) 1175 self._responses[request_id] = (response, content)
1176 1177 @util.positional(1)
1178 - def execute(self, http=None):
1179 """Execute all the requests as a single batched HTTP request. 1180 1181 Args: 1182 http: httplib2.Http, an http object to be used in place of the one the 1183 HttpRequest request object was constructed with. If one isn't supplied 1184 then use a http object from the requests in this batch. 1185 1186 Returns: 1187 None 1188 1189 Raises: 1190 httplib2.HttpLib2Error if a transport error has occured. 1191 apiclient.errors.BatchError if the response is the wrong format. 1192 """ 1193 1194 # If http is not supplied use the first valid one given in the requests. 1195 if http is None: 1196 for request_id in self._order: 1197 request = self._requests[request_id] 1198 if request is not None: 1199 http = request.http 1200 break 1201 1202 if http is None: 1203 raise ValueError("Missing a valid http object.") 1204 1205 self._execute(http, self._order, self._requests) 1206 1207 # Loop over all the requests and check for 401s. For each 401 request the 1208 # credentials should be refreshed and then sent again in a separate batch. 1209 redo_requests = {} 1210 redo_order = [] 1211 1212 for request_id in self._order: 1213 resp, content = self._responses[request_id] 1214 if resp['status'] == '401': 1215 redo_order.append(request_id) 1216 request = self._requests[request_id] 1217 self._refresh_and_apply_credentials(request, http) 1218 redo_requests[request_id] = request 1219 1220 if redo_requests: 1221 self._execute(http, redo_order, redo_requests) 1222 1223 # Now process all callbacks that are erroring, and raise an exception for 1224 # ones that return a non-2xx response? Or add extra parameter to callback 1225 # that contains an HttpError? 1226 1227 for request_id in self._order: 1228 resp, content = self._responses[request_id] 1229 1230 request = self._requests[request_id] 1231 callback = self._callbacks[request_id] 1232 1233 response = None 1234 exception = None 1235 try: 1236 if resp.status >= 300: 1237 raise HttpError(resp, content, uri=request.uri) 1238 response = request.postproc(resp, content) 1239 except HttpError, e: 1240 exception = e 1241 1242 if callback is not None: 1243 callback(request_id, response, exception) 1244 if self._callback is not None: 1245 self._callback(request_id, response, exception)
1246
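Besides the class-level callback shown in the docstring, add() accepts a per-request callback and an explicit request_id that is echoed back with the response. A sketch against the hypothetical 'farm' service from the docstring example:

  def on_list(request_id, response, exception):
    if exception is not None:
      print "Request %s failed: %s" % (request_id, exception)
    else:
      print "Request %s succeeded." % request_id

  batch = BatchHttpRequest()
  batch.add(service.animals().list(), callback=on_list, request_id='animals')
  batch.add(service.farmers().list(), callback=on_list, request_id='farmers')
  batch.execute()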
1247 1248 -class HttpRequestMock(object):
1249 """Mock of HttpRequest. 1250 1251 Do not construct directly, instead use RequestMockBuilder. 1252 """ 1253
1254 - def __init__(self, resp, content, postproc):
1255 """Constructor for HttpRequestMock 1256 1257 Args: 1258 resp: httplib2.Response, the response to emulate coming from the request 1259 content: string, the response body 1260 postproc: callable, the post processing function usually supplied by 1261 the model class. See model.JsonModel.response() as an example. 1262 """ 1263 self.resp = resp 1264 self.content = content 1265 self.postproc = postproc 1266 if resp is None: 1267 self.resp = httplib2.Response({'status': 200, 'reason': 'OK'}) 1268 if 'reason' in self.resp: 1269 self.resp.reason = self.resp['reason']
1270
1271 - def execute(self, http=None):
1272 """Execute the request. 1273 1274 Same behavior as HttpRequest.execute(), but the response is 1275 mocked and not really from an HTTP request/response. 1276 """ 1277 return self.postproc(self.resp, self.content)
1278
1279 1280 -class RequestMockBuilder(object):
1281 """A simple mock of HttpRequest 1282 1283 Pass in a dictionary to the constructor that maps request methodIds to 1284 tuples of (httplib2.Response, content, opt_expected_body) that should be 1285 returned when that method is called. None may also be passed in for the 1286 httplib2.Response, in which case a 200 OK response will be generated. 1287 If an opt_expected_body (str or dict) is provided, it will be compared to 1288 the body and UnexpectedBodyError will be raised on inequality. 1289 1290 Example: 1291 response = '{"data": {"id": "tag:google.c...' 1292 requestBuilder = RequestMockBuilder( 1293 { 1294 'plus.activities.get': (None, response), 1295 } 1296 ) 1297 apiclient.discovery.build("plus", "v1", requestBuilder=requestBuilder) 1298 1299 Methods that you do not supply a response for will return a 1300 200 OK with an empty string as the response content or raise an excpetion 1301 if check_unexpected is set to True. The methodId is taken from the rpcName 1302 in the discovery document. 1303 1304 For more details see the project wiki. 1305 """ 1306
1307 - def __init__(self, responses, check_unexpected=False):
1308 """Constructor for RequestMockBuilder 1309 1310 The constructed object should be a callable object 1311 that can replace the class HttpResponse. 1312 1313 responses - A dictionary that maps methodIds into tuples 1314 of (httplib2.Response, content). The methodId 1315 comes from the 'rpcName' field in the discovery 1316 document. 1317 check_unexpected - A boolean setting whether or not UnexpectedMethodError 1318 should be raised on unsupplied method. 1319 """ 1320 self.responses = responses 1321 self.check_unexpected = check_unexpected
1322
1323 - def __call__(self, http, postproc, uri, method='GET', body=None, 1324 headers=None, methodId=None, resumable=None):
1325 """Implements the callable interface that discovery.build() expects 1326 of requestBuilder, which is to build an object compatible with 1327 HttpRequest.execute(). See that method for the description of the 1328 parameters and the expected response. 1329 """ 1330 if methodId in self.responses: 1331 response = self.responses[methodId] 1332 resp, content = response[:2] 1333 if len(response) > 2: 1334 # Test the body against the supplied expected_body. 1335 expected_body = response[2] 1336 if bool(expected_body) != bool(body): 1337 # Not expecting a body and provided one 1338 # or expecting a body and not provided one. 1339 raise UnexpectedBodyError(expected_body, body) 1340 if isinstance(expected_body, str): 1341 expected_body = simplejson.loads(expected_body) 1342 body = simplejson.loads(body) 1343 if body != expected_body: 1344 raise UnexpectedBodyError(expected_body, body) 1345 return HttpRequestMock(resp, content, postproc) 1346 elif self.check_unexpected: 1347 raise UnexpectedMethodError(methodId=methodId) 1348 else: 1349 model = JsonModel(False) 1350 return HttpRequestMock(None, '{}', model.response)
1351
1352 1353 -class HttpMock(object):
1354 """Mock of httplib2.Http""" 1355
1356 - def __init__(self, filename, headers=None):
1357 """ 1358 Args: 1359 filename: string, absolute filename to read response from 1360 headers: dict, header to return with response 1361 """ 1362 if headers is None: 1363 headers = {'status': '200 OK'} 1364 f = file(filename, 'r') 1365 self.data = f.read() 1366 f.close() 1367 self.headers = headers
1368
1369 - def request(self, uri, 1370 method='GET', 1371 body=None, 1372 headers=None, 1373 redirections=1, 1374 connection_type=None):
1375 return httplib2.Response(self.headers), self.data
1376
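A sketch of how HttpMock and RequestMockBuilder are commonly combined in tests: HttpMock serves a saved discovery document to build(), and RequestMockBuilder supplies canned responses for the individual API calls. The file name and canned body are hypothetical:

  from apiclient.discovery import build
  from apiclient.http import HttpMock, RequestMockBuilder

  # The file holds a previously saved discovery document for the service.
  http = HttpMock('plus-discovery.json', {'status': '200'})
  requestBuilder = RequestMockBuilder({
      'plus.activities.get': (None, '{"id": "12345"}'),
  })
  service = build('plus', 'v1', http=http, requestBuilder=requestBuilder)
  activity = service.activities().get(activityId='12345').execute()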
1377 1378 -class HttpMockSequence(object):
1379 """Mock of httplib2.Http 1380 1381 Mocks a sequence of calls to request returning different responses for each 1382 call. Create an instance initialized with the desired response headers 1383 and content and then use as if an httplib2.Http instance. 1384 1385 http = HttpMockSequence([ 1386 ({'status': '401'}, ''), 1387 ({'status': '200'}, '{"access_token":"1/3w","expires_in":3600}'), 1388 ({'status': '200'}, 'echo_request_headers'), 1389 ]) 1390 resp, content = http.request("http://examples.com") 1391 1392 There are special values you can pass in for content to trigger 1393 behavours that are helpful in testing. 1394 1395 'echo_request_headers' means return the request headers in the response body 1396 'echo_request_headers_as_json' means return the request headers in 1397 the response body 1398 'echo_request_body' means return the request body in the response body 1399 'echo_request_uri' means return the request uri in the response body 1400 """ 1401
1402 - def __init__(self, iterable):
1403 """ 1404 Args: 1405 iterable: iterable, a sequence of pairs of (headers, body) 1406 """ 1407 self._iterable = iterable 1408 self.follow_redirects = True
1409
1410 - def request(self, uri, 1411 method='GET', 1412 body=None, 1413 headers=None, 1414 redirections=1, 1415 connection_type=None):
1416 resp, content = self._iterable.pop(0) 1417 if content == 'echo_request_headers': 1418 content = headers 1419 elif content == 'echo_request_headers_as_json': 1420 content = simplejson.dumps(headers) 1421 elif content == 'echo_request_body': 1422 if hasattr(body, 'read'): 1423 content = body.read() 1424 else: 1425 content = body 1426 elif content == 'echo_request_uri': 1427 content = uri 1428 return httplib2.Response(resp), content
1429
1430 1431 -def set_user_agent(http, user_agent):
1432 """Set the user-agent on every request. 1433 1434 Args: 1435 http - An instance of httplib2.Http 1436 or something that acts like it. 1437 user_agent: string, the value for the user-agent header. 1438 1439 Returns: 1440 A modified instance of http that was passed in. 1441 1442 Example: 1443 1444 h = httplib2.Http() 1445 h = set_user_agent(h, "my-app-name/6.0") 1446 1447 Most of the time the user-agent will be set doing auth, this is for the rare 1448 cases where you are accessing an unauthenticated endpoint. 1449 """ 1450 request_orig = http.request 1451 1452 # The closure that will replace 'httplib2.Http.request'. 1453 def new_request(uri, method='GET', body=None, headers=None, 1454 redirections=httplib2.DEFAULT_MAX_REDIRECTS, 1455 connection_type=None): 1456 """Modify the request headers to add the user-agent.""" 1457 if headers is None: 1458 headers = {} 1459 if 'user-agent' in headers: 1460 headers['user-agent'] = user_agent + ' ' + headers['user-agent'] 1461 else: 1462 headers['user-agent'] = user_agent 1463 resp, content = request_orig(uri, method, body, headers, 1464 redirections, connection_type) 1465 return resp, content
1466 1467 http.request = new_request 1468 return http 1469
1470 1471 -def tunnel_patch(http):
1472 """Tunnel PATCH requests over POST. 1473 Args: 1474 http - An instance of httplib2.Http 1475 or something that acts like it. 1476 1477 Returns: 1478 A modified instance of http that was passed in. 1479 1480 Example: 1481 1482 h = httplib2.Http() 1483 h = tunnel_patch(h, "my-app-name/6.0") 1484 1485 Useful if you are running on a platform that doesn't support PATCH. 1486 Apply this last if you are using OAuth 1.0, as changing the method 1487 will result in a different signature. 1488 """ 1489 request_orig = http.request 1490 1491 # The closure that will replace 'httplib2.Http.request'. 1492 def new_request(uri, method='GET', body=None, headers=None, 1493 redirections=httplib2.DEFAULT_MAX_REDIRECTS, 1494 connection_type=None): 1495 """Modify the request headers to add the user-agent.""" 1496 if headers is None: 1497 headers = {} 1498 if method == 'PATCH': 1499 if 'oauth_token' in headers.get('authorization', ''): 1500 logging.warning( 1501 'OAuth 1.0 request made with Credentials after tunnel_patch.') 1502 headers['x-http-method-override'] = "PATCH" 1503 method = 'POST' 1504 resp, content = request_orig(uri, method, body, headers, 1505 redirections, connection_type) 1506 return resp, content
1507 1508 http.request = new_request 1509 return http 1510
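Both helpers wrap http.request in place and return the object passed in, so they can be chained; a short sketch (note the OAuth 1.0 ordering caveat in the tunnel_patch docstring):

  h = httplib2.Http()
  h = set_user_agent(h, "my-app-name/6.0")
  h = tunnel_patch(h)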