Merge pull request #64 from pferate/python3-module_updates

Python 3 support.

Migrate imports and string handling so the client library and its tests run on both Python 2 and Python 3:

- replace StringIO, urllib, and urlparse usage with six and six.moves.urllib.parse;
- switch relative imports (from . import ...) to absolute googleapiclient imports;
- decode HTTP response bodies from bytes before handing them to json.loads;
- treat media payloads as bytes (BytesIO, b'...' literals) in the library and tests;
- drop the Python 2.5-era parse_qs/parse_qsl fallbacks and the try/except "import io" guards;
- skip Python-2-only tests on Python 3 and replace tab indentation with spaces in sample_tools.py.
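For context, below is a minimal sketch of the compatibility pattern this change applies throughout the diff. The helper names (replace_page_token, parse_json_body) are illustrative only and are not part of the library's API:

    import json

    from six import BytesIO
    from six.moves.urllib.parse import parse_qsl, urlencode, urlparse, urlunparse


    def replace_page_token(uri, page_token):
        # six.moves.urllib.parse exposes the same names on Python 2 and 3,
        # so there is no need to juggle the urllib/urlparse modules.
        parsed = list(urlparse(uri))
        query = [(k, v) for (k, v) in parse_qsl(parsed[4]) if k != 'pageToken']
        query.append(('pageToken', page_token))
        parsed[4] = urlencode(query)
        return urlunparse(parsed)


    def parse_json_body(content):
        # HTTP bodies arrive as bytes on Python 3; decode defensively so the
        # same code path also accepts str on Python 2.
        try:
            content = content.decode('utf-8')
        except AttributeError:
            pass  # already a text string
        return json.loads(content)


    # Media payloads are likewise handled as bytes rather than StringIO:
    fd = BytesIO(b'...some data to upload...')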
diff --git a/apiclient/__init__.py b/apiclient/__init__.py
index 5efb142..d75e7a1 100644
--- a/apiclient/__init__.py
+++ b/apiclient/__init__.py
@@ -1,5 +1,7 @@
 """Retain apiclient as an alias for googleapiclient."""
 
+from six import iteritems
+
 import googleapiclient
 
 try:
@@ -36,5 +38,5 @@
 }
 
 import sys
-for module_name, module in _SUBMODULES.iteritems():
+for module_name, module in iteritems(_SUBMODULES):
   sys.modules['apiclient.%s' % module_name] = module
diff --git a/googleapiclient/discovery.py b/googleapiclient/discovery.py
index cdbf140..f3e5690 100644
--- a/googleapiclient/discovery.py
+++ b/googleapiclient/discovery.py
@@ -28,9 +28,11 @@
     'key2param',
     ]
 
+from six import StringIO
+from six.moves.urllib.parse import urlencode, urlparse, urljoin, \
+  urlunparse, parse_qsl
 
 # Standard library imports
-import StringIO
 import copy
 from email.generator import Generator
 from email.mime.multipart import MIMEMultipart
@@ -41,20 +43,13 @@
 import mimetypes
 import os
 import re
-import urllib
-import urlparse
-
-try:
-  from urlparse import parse_qsl
-except ImportError:
-  from cgi import parse_qsl
 
 # Third-party imports
 import httplib2
-from . import mimeparse
 import uritemplate
 
 # Local imports
+from googleapiclient import mimeparse
 from googleapiclient.errors import HttpError
 from googleapiclient.errors import InvalidJsonError
 from googleapiclient.errors import MediaUploadSizeError
@@ -207,6 +202,11 @@
     raise HttpError(resp, content, uri=requested_url)
 
   try:
+    content = content.decode('utf-8')
+  except AttributeError:
+    pass
+
+  try:
     service = json.loads(content)
   except ValueError as e:
     logger.error('Failed to parse as JSON: ' + content)
@@ -258,7 +258,7 @@
 
   if isinstance(service, six.string_types):
     service = json.loads(service)
-  base = urlparse.urljoin(service['rootUrl'], service['servicePath'])
+  base = urljoin(service['rootUrl'], service['servicePath'])
   schema = Schemas(service)
 
   if credentials:
@@ -505,7 +505,7 @@
   # exception here is the case of media uploads, where url will be an
   # absolute url.
   if url.startswith('http://') or url.startswith('https://'):
-    return urlparse.urljoin(base, url)
+    return urljoin(base, url)
   new_base = base if base.endswith('/') else base + '/'
   new_url = url[1:] if url.startswith('/') else url
   return new_base + new_url
@@ -712,7 +712,7 @@
         raise TypeError('media_filename must be str or MediaUpload.')
 
       # Check the maxSize
-      if maxSize > 0 and media_upload.size() > maxSize:
+      if media_upload.size() is not None and media_upload.size() > maxSize > 0:
         raise MediaUploadSizeError("Media larger than: %s" % maxSize)
 
       # Use the media path uri for media uploads
@@ -752,7 +752,7 @@
           msgRoot.attach(msg)
           # encode the body: note that we can't use `as_string`, because
           # it plays games with `From ` lines.
-          fp = StringIO.StringIO()
+          fp = StringIO()
           g = Generator(fp, mangle_from_=False)
           g.flatten(msgRoot, unixfrom=False)
           body = fp.getvalue()
@@ -859,14 +859,14 @@
     request = copy.copy(previous_request)
 
     pageToken = previous_response['nextPageToken']
-    parsed = list(urlparse.urlparse(request.uri))
+    parsed = list(urlparse(request.uri))
     q = parse_qsl(parsed[4])
 
     # Find and remove old 'pageToken' value from URI
     newq = [(key, value) for (key, value) in q if key != 'pageToken']
     newq.append(('pageToken', pageToken))
-    parsed[4] = urllib.urlencode(newq)
-    uri = urlparse.urlunparse(parsed)
+    parsed[4] = urlencode(newq)
+    uri = urlunparse(parsed)
 
     request.uri = uri
 
diff --git a/googleapiclient/http.py b/googleapiclient/http.py
index f0e9133..d09483c 100644
--- a/googleapiclient/http.py
+++ b/googleapiclient/http.py
@@ -24,34 +24,35 @@
 
 __author__ = 'jcgregorio@google.com (Joe Gregorio)'
 
-import StringIO
+from six import BytesIO, StringIO
+from six.moves.urllib.parse import urlparse, urlunparse, quote, unquote
+
 import base64
 import copy
 import gzip
 import httplib2
 import json
 import logging
-from . import mimeparse
 import mimetypes
 import os
 import random
 import sys
 import time
-import urllib
-import urlparse
 import uuid
 
 from email.generator import Generator
 from email.mime.multipart import MIMEMultipart
 from email.mime.nonmultipart import MIMENonMultipart
 from email.parser import FeedParser
-from .errors import BatchError
-from .errors import HttpError
-from .errors import InvalidChunkSizeError
-from .errors import ResumableUploadError
-from .errors import UnexpectedBodyError
-from .errors import UnexpectedMethodError
-from .model import JsonModel
+
+from googleapiclient import mimeparse
+from googleapiclient.errors import BatchError
+from googleapiclient.errors import HttpError
+from googleapiclient.errors import InvalidChunkSizeError
+from googleapiclient.errors import ResumableUploadError
+from googleapiclient.errors import UnexpectedBodyError
+from googleapiclient.errors import UnexpectedMethodError
+from googleapiclient.model import JsonModel
 from oauth2client import util
 
 
@@ -262,7 +263,7 @@
   Note that the Python file object is compatible with io.Base and can be used
   with this class also.
 
-    fh = io.BytesIO('...Some data to upload...')
+    fh = BytesIO('...Some data to upload...')
     media = MediaIoBaseUpload(fh, mimetype='image/png',
       chunksize=1024*1024, resumable=True)
     farm.animals().insert(
@@ -468,7 +469,7 @@
     resumable: bool, True if this is a resumable upload. False means upload
       in a single request.
     """
-    fd = StringIO.StringIO(body)
+    fd = BytesIO(body)
     super(MediaInMemoryUpload, self).__init__(fd, mimetype, chunksize=chunksize,
                                               resumable=resumable)
 
@@ -702,8 +703,8 @@
       self.method = 'POST'
       self.headers['x-http-method-override'] = 'GET'
       self.headers['content-type'] = 'application/x-www-form-urlencoded'
-      parsed = urlparse.urlparse(self.uri)
-      self.uri = urlparse.urlunparse(
+      parsed = urlparse(self.uri)
+      self.uri = urlunparse(
           (parsed.scheme, parsed.netloc, parsed.path, parsed.params, None,
            None)
           )
@@ -1051,7 +1052,7 @@
     if self._base_id is None:
       self._base_id = uuid.uuid4()
 
-    return '<%s+%s>' % (self._base_id, urllib.quote(id_))
+    return '<%s+%s>' % (self._base_id, quote(id_))
 
   def _header_to_id(self, header):
     """Convert a Content-ID header value to an id.
@@ -1074,7 +1075,7 @@
       raise BatchError("Invalid value for Content-ID: %s" % header)
     base, id_ = header[1:-1].rsplit('+', 1)
 
-    return urllib.unquote(id_)
+    return unquote(id_)
 
   def _serialize_request(self, request):
     """Convert an HttpRequest object into a string.
@@ -1086,9 +1087,9 @@
       The request as a string in application/http format.
     """
     # Construct status line
-    parsed = urlparse.urlparse(request.uri)
-    request_line = urlparse.urlunparse(
-        (None, None, parsed.path, parsed.params, parsed.query, None)
+    parsed = urlparse(request.uri)
+    request_line = urlunparse(
+        ('', '', parsed.path, parsed.params, parsed.query, '')
         )
     status_line = request.method + ' ' + request_line + ' HTTP/1.1\n'
     major, minor = request.headers.get('content-type', 'application/json').split('/')
@@ -1113,7 +1114,7 @@
       msg['content-length'] = str(len(request.body))
 
     # Serialize the mime message.
-    fp = StringIO.StringIO()
+    fp = StringIO()
     # maxheaderlen=0 means don't line wrap headers.
     g = Generator(fp, maxheaderlen=0)
     g.flatten(msg, unixfrom=False)
@@ -1123,7 +1124,7 @@
     if request.body is None:
       body = body[:-2]
 
-    return status_line.encode('utf-8') + body
+    return status_line + body
 
   def _deserialize_response(self, payload):
     """Convert string into httplib2 response and content.
@@ -1236,7 +1237,7 @@
 
     # encode the body: note that we can't use `as_string`, because
     # it plays games with `From ` lines.
-    fp = StringIO.StringIO()
+    fp = StringIO()
     g = Generator(fp, mangle_from_=False)
     g.flatten(message, unixfrom=False)
     body = fp.getvalue()
diff --git a/googleapiclient/model.py b/googleapiclient/model.py
index 9be5a59..e8afb63 100644
--- a/googleapiclient/model.py
+++ b/googleapiclient/model.py
@@ -26,10 +26,11 @@
 
 import json
 import logging
-import urllib
+
+from six.moves.urllib.parse import urlencode
 
 from googleapiclient import __version__
-from .errors import HttpError
+from googleapiclient.errors import HttpError
 
 
 dump_request_response = False
@@ -170,7 +171,7 @@
         if isinstance(value, six.text_type) and callable(value.encode):
           value = value.encode('utf-8')
         astuples.append((key, value))
-    return '?' + urllib.urlencode(astuples)
+    return '?' + urlencode(astuples)
 
   def _log_response(self, resp, content):
     """Logs debugging information about the response if requested."""
@@ -257,7 +258,10 @@
     return json.dumps(body_value)
 
   def deserialize(self, content):
-    content = content.decode('utf-8')
+    try:
+        content = content.decode('utf-8')
+    except AttributeError:
+        pass
     body = json.loads(content)
     if self._data_wrapper and isinstance(body, dict) and 'data' in body:
       body = body['data']
diff --git a/googleapiclient/sample_tools.py b/googleapiclient/sample_tools.py
index 3e56c0a..2b4e7b4 100644
--- a/googleapiclient/sample_tools.py
+++ b/googleapiclient/sample_tools.py
@@ -95,9 +95,9 @@
     service = discovery.build(name, version, http=http)
   else:
     # Construct a service object using a local discovery document file.
-	with open(discovery_filename) as discovery_file:
-	  service = discovery.build_from_document(
-		  discovery_file.read(),
-		  base='https://www.googleapis.com/',
-		  http=http)
+    with open(discovery_filename) as discovery_file:
+      service = discovery.build_from_document(
+          discovery_file.read(),
+          base='https://www.googleapis.com/',
+          http=http)
   return (service, flags)
diff --git a/tests/test_discovery.py b/tests/test_discovery.py
index ccc69c2..e2677b0 100644
--- a/tests/test_discovery.py
+++ b/tests/test_discovery.py
@@ -25,6 +25,9 @@
 
 __author__ = 'jcgregorio@google.com (Joe Gregorio)'
 
+from six import BytesIO, StringIO
+from six.moves.urllib.parse import urlparse, parse_qs
+
 import copy
 import datetime
 import httplib2
@@ -34,15 +37,6 @@
 import pickle
 import sys
 import unittest2 as unittest
-import urlparse
-import StringIO
-
-
-try:
-  from urlparse import parse_qs
-except ImportError:
-  from cgi import parse_qs
-
 
 from googleapiclient.discovery import _fix_up_media_upload
 from googleapiclient.discovery import _fix_up_method_description
@@ -82,8 +76,8 @@
 
 def assertUrisEqual(testcase, expected, actual):
   """Test that URIs are the same, up to reordering of query parameters."""
-  expected = urlparse.urlparse(expected)
-  actual = urlparse.urlparse(actual)
+  expected = urlparse(expected)
+  actual = urlparse(actual)
   testcase.assertEqual(expected.scheme, actual.scheme)
   testcase.assertEqual(expected.netloc, actual.netloc)
   testcase.assertEqual(expected.path, actual.path)
@@ -442,7 +436,7 @@
       self.assertTrue('unexpected' in str(e))
 
   def _check_query_types(self, request):
-    parsed = urlparse.urlparse(request.uri)
+    parsed = urlparse(request.uri)
     q = parse_qs(parsed[4])
     self.assertEqual(q['q'], ['foo'])
     self.assertEqual(q['i'], ['1'])
@@ -479,7 +473,7 @@
     zoo = build('zoo', 'v1', http=http)
     request = zoo.query(trace='html', fields='description')
 
-    parsed = urlparse.urlparse(request.uri)
+    parsed = urlparse(request.uri)
     q = parse_qs(parsed[4])
     self.assertEqual(q['trace'], ['html'])
     self.assertEqual(q['fields'], ['description'])
@@ -489,7 +483,7 @@
     zoo = build('zoo', 'v1', http=http)
     request = zoo.query(trace=None, fields='description')
 
-    parsed = urlparse.urlparse(request.uri)
+    parsed = urlparse(request.uri)
     q = parse_qs(parsed[4])
     self.assertFalse('trace' in q)
 
@@ -498,7 +492,7 @@
     zoo = build('zoo', 'v1', http=http)
     request = zoo.animals().get(name='Lion')
 
-    parsed = urlparse.urlparse(request.uri)
+    parsed = urlparse(request.uri)
     q = parse_qs(parsed[4])
     self.assertEqual(q['alt'], ['json'])
     self.assertEqual(request.headers['accept'], 'application/json')
@@ -508,7 +502,7 @@
     zoo = build('zoo', 'v1', http=http)
     request = zoo.animals().getmedia(name='Lion')
 
-    parsed = urlparse.urlparse(request.uri)
+    parsed = urlparse(request.uri)
     q = parse_qs(parsed[4])
     self.assertTrue('alt' not in q)
     self.assertEqual(request.headers['accept'], '*/*')
@@ -560,7 +554,7 @@
     self.assertTrue(getattr(zoo, 'animals'))
 
     request = zoo.animals().list(name='bat', projection="full")
-    parsed = urlparse.urlparse(request.uri)
+    parsed = urlparse(request.uri)
     q = parse_qs(parsed[4])
     self.assertEqual(q['name'], ['bat'])
     self.assertEqual(q['projection'], ['full'])
@@ -570,16 +564,17 @@
     zoo = build('zoo', 'v1', http=self.http)
     self.assertTrue(getattr(zoo, 'animals'))
     request = zoo.my().favorites().list(max_results="5")
-    parsed = urlparse.urlparse(request.uri)
+    parsed = urlparse(request.uri)
     q = parse_qs(parsed[4])
     self.assertEqual(q['max-results'], ['5'])
 
+  @unittest.skipIf(six.PY3, 'print is not a reserved name in Python 3')
   def test_methods_with_reserved_names(self):
     self.http = HttpMock(datafile('zoo.json'), {'status': '200'})
     zoo = build('zoo', 'v1', http=self.http)
     self.assertTrue(getattr(zoo, 'animals'))
     request = zoo.global_().print_().assert_(max_results="5")
-    parsed = urlparse.urlparse(request.uri)
+    parsed = urlparse(request.uri)
     self.assertEqual(parsed[2], '/zoo/v1/global/print/assert')
 
   def test_top_level_functions(self):
@@ -587,7 +582,7 @@
     zoo = build('zoo', 'v1', http=self.http)
     self.assertTrue(getattr(zoo, 'query'))
     request = zoo.query(q="foo")
-    parsed = urlparse.urlparse(request.uri)
+    parsed = urlparse(request.uri)
     q = parse_qs(parsed[4])
     self.assertEqual(q['q'], ['foo'])
 
@@ -602,7 +597,7 @@
     zoo = build('zoo', 'v1', http=self.http)
     request = zoo.animals().crossbreed(media_body=datafile('small.png'))
     self.assertEquals('image/png', request.headers['content-type'])
-    self.assertEquals('PNG', request.body[1:4])
+    self.assertEquals(b'PNG', request.body[1:4])
 
   def test_simple_media_raise_correct_exceptions(self):
     self.http = HttpMock(datafile('zoo.json'), {'status': '200'})
@@ -626,7 +621,7 @@
 
     request = zoo.animals().insert(media_body=datafile('small.png'))
     self.assertEquals('image/png', request.headers['content-type'])
-    self.assertEquals('PNG', request.body[1:4])
+    self.assertEquals(b'PNG', request.body[1:4])
     assertUrisEqual(self,
         'https://www.googleapis.com/upload/zoo/v1/animals?uploadType=media&alt=json',
         request.uri)
@@ -857,62 +852,48 @@
     self.http = HttpMock(datafile('zoo.json'), {'status': '200'})
     zoo = build('zoo', 'v1', http=self.http)
 
-    try:
-      import io
+    # Set up a seekable stream and try to upload in single chunk.
+    fd = BytesIO(b'01234"56789"')
+    media_upload = MediaIoBaseUpload(
+        fd=fd, mimetype='text/plain', chunksize=-1, resumable=True)
 
-      # Set up a seekable stream and try to upload in single chunk.
-      fd = io.BytesIO('01234"56789"')
-      media_upload = MediaIoBaseUpload(
-          fd=fd, mimetype='text/plain', chunksize=-1, resumable=True)
+    request = zoo.animals().insert(media_body=media_upload, body=None)
 
-      request = zoo.animals().insert(media_body=media_upload, body=None)
+    # The single chunk fails, restart at the right point.
+    http = HttpMockSequence([
+      ({'status': '200',
+        'location': 'http://upload.example.com'}, ''),
+      ({'status': '308',
+        'location': 'http://upload.example.com/2',
+        'range': '0-4'}, ''),
+      ({'status': '200'}, 'echo_request_body'),
+      ])
 
-      # The single chunk fails, restart at the right point.
-      http = HttpMockSequence([
-        ({'status': '200',
-          'location': 'http://upload.example.com'}, ''),
-        ({'status': '308',
-          'location': 'http://upload.example.com/2',
-          'range': '0-4'}, ''),
-        ({'status': '200'}, 'echo_request_body'),
-        ])
-
-      body = request.execute(http=http)
-      self.assertEqual('56789', body)
-
-    except ImportError:
-      pass
-
+    body = request.execute(http=http)
+    self.assertEqual('56789', body)
 
   def test_media_io_base_stream_chunksize_resume(self):
     self.http = HttpMock(datafile('zoo.json'), {'status': '200'})
     zoo = build('zoo', 'v1', http=self.http)
 
+    # Set up a seekable stream and try to upload in chunks.
+    fd = BytesIO(b'0123456789')
+    media_upload = MediaIoBaseUpload(
+        fd=fd, mimetype='text/plain', chunksize=5, resumable=True)
+
+    request = zoo.animals().insert(media_body=media_upload, body=None)
+
+    # The single chunk fails, pull the content sent out of the exception.
+    http = HttpMockSequence([
+      ({'status': '200',
+        'location': 'http://upload.example.com'}, ''),
+      ({'status': '400'}, 'echo_request_body'),
+      ])
+
     try:
-      import io
-
-      # Set up a seekable stream and try to upload in chunks.
-      fd = io.BytesIO('0123456789')
-      media_upload = MediaIoBaseUpload(
-          fd=fd, mimetype='text/plain', chunksize=5, resumable=True)
-
-      request = zoo.animals().insert(media_body=media_upload, body=None)
-
-      # The single chunk fails, pull the content sent out of the exception.
-      http = HttpMockSequence([
-        ({'status': '200',
-          'location': 'http://upload.example.com'}, ''),
-        ({'status': '400'}, 'echo_request_body'),
-        ])
-
-      try:
-        body = request.execute(http=http)
-      except HttpError as e:
-        self.assertEqual('01234', e.content)
-
-    except ImportError:
-      pass
-
+      body = request.execute(http=http)
+    except HttpError as e:
+      self.assertEqual(b'01234', e.content)
 
   def test_resumable_media_handle_uploads_of_unknown_size(self):
     http = HttpMockSequence([
@@ -1021,7 +1002,7 @@
     self.http = HttpMock(datafile('zoo.json'), {'status': '200'})
     zoo = build('zoo', 'v1', http=self.http)
 
-    fd = StringIO.StringIO('data goes here')
+    fd = BytesIO(b'data goes here')
 
     # Create an upload that doesn't know the full size of the media.
     upload = MediaIoBaseUpload(
@@ -1045,7 +1026,7 @@
     zoo = build('zoo', 'v1', http=self.http)
 
     # Create an upload that doesn't know the full size of the media.
-    fd = StringIO.StringIO('data goes here')
+    fd = BytesIO(b'data goes here')
 
     upload = MediaIoBaseUpload(
         fd=fd, mimetype='image/png', chunksize=500, resumable=True)
@@ -1179,7 +1160,7 @@
     request = tasks.tasklists().list()
     next_request = tasks.tasklists().list_next(
         request, {'nextPageToken': '123abc'})
-    parsed = list(urlparse.urlparse(next_request.uri))
+    parsed = list(urlparse(next_request.uri))
     q = parse_qs(parsed[4])
     self.assertEqual(q['pageToken'][0], '123abc')
 
@@ -1196,7 +1177,7 @@
     zoo = build('zoo', 'v1', http=http)
     request = zoo.animals().get_media(name='Lion')
 
-    parsed = urlparse.urlparse(request.uri)
+    parsed = urlparse(request.uri)
     q = parse_qs(parsed[4])
     self.assertEqual(q['alt'], ['media'])
     self.assertEqual(request.headers['accept'], '*/*')
diff --git a/tests/test_http.py b/tests/test_http.py
index a3ff0ac..b47b9dc 100644
--- a/tests/test_http.py
+++ b/tests/test_http.py
@@ -23,14 +23,17 @@
 
 __author__ = 'jcgregorio@google.com (Joe Gregorio)'
 
+from six import PY3
+from six import BytesIO, StringIO
+from io import FileIO
+from six.moves.urllib.parse import urlencode
+
 # Do not remove the httplib2 import
 import httplib2
 import logging
 import os
 import unittest2 as unittest
-import urllib
 import random
-import StringIO
 import time
 
 from googleapiclient.discovery import build
@@ -135,7 +138,7 @@
     self.assertEqual(190, upload.size())
     self.assertEqual(True, upload.resumable())
     self.assertEqual(500, upload.chunksize())
-    self.assertEqual('PNG', upload.getbytes(1, 3))
+    self.assertEqual(b'PNG', upload.getbytes(1, 3))
 
     json = upload.to_json()
     new_upload = MediaUpload.new_from_json(json)
@@ -144,7 +147,7 @@
     self.assertEqual(190, new_upload.size())
     self.assertEqual(True, new_upload.resumable())
     self.assertEqual(500, new_upload.chunksize())
-    self.assertEqual('PNG', new_upload.getbytes(1, 3))
+    self.assertEqual(b'PNG', new_upload.getbytes(1, 3))
 
   def test_media_file_upload_raises_on_invalid_chunksize(self):
     self.assertRaises(InvalidChunkSizeError, MediaFileUpload,
@@ -152,12 +155,12 @@
         resumable=True)
 
   def test_media_inmemory_upload(self):
-    media = MediaInMemoryUpload('abcdef', mimetype='text/plain', chunksize=10,
+    media = MediaInMemoryUpload(b'abcdef', mimetype='text/plain', chunksize=10,
                                 resumable=True)
     self.assertEqual('text/plain', media.mimetype())
     self.assertEqual(10, media.chunksize())
     self.assertTrue(media.resumable())
-    self.assertEqual('bc', media.getbytes(1, 2))
+    self.assertEqual(b'bc', media.getbytes(1, 2))
     self.assertEqual(6, media.size())
 
   def test_http_request_to_from_json(self):
@@ -196,33 +199,28 @@
 class TestMediaIoBaseUpload(unittest.TestCase):
 
   def test_media_io_base_upload_from_file_io(self):
-    try:
-      import io
-
-      fd = io.FileIO(datafile('small.png'), 'r')
-      upload = MediaIoBaseUpload(
-          fd=fd, mimetype='image/png', chunksize=500, resumable=True)
-      self.assertEqual('image/png', upload.mimetype())
-      self.assertEqual(190, upload.size())
-      self.assertEqual(True, upload.resumable())
-      self.assertEqual(500, upload.chunksize())
-      self.assertEqual('PNG', upload.getbytes(1, 3))
-    except ImportError:
-      pass
+    fd = FileIO(datafile('small.png'), 'r')
+    upload = MediaIoBaseUpload(
+        fd=fd, mimetype='image/png', chunksize=500, resumable=True)
+    self.assertEqual('image/png', upload.mimetype())
+    self.assertEqual(190, upload.size())
+    self.assertEqual(True, upload.resumable())
+    self.assertEqual(500, upload.chunksize())
+    self.assertEqual(b'PNG', upload.getbytes(1, 3))
 
   def test_media_io_base_upload_from_file_object(self):
-    f = open(datafile('small.png'), 'r')
+    f = open(datafile('small.png'), 'rb')
     upload = MediaIoBaseUpload(
         fd=f, mimetype='image/png', chunksize=500, resumable=True)
     self.assertEqual('image/png', upload.mimetype())
     self.assertEqual(190, upload.size())
     self.assertEqual(True, upload.resumable())
     self.assertEqual(500, upload.chunksize())
-    self.assertEqual('PNG', upload.getbytes(1, 3))
+    self.assertEqual(b'PNG', upload.getbytes(1, 3))
     f.close()
 
   def test_media_io_base_upload_serializable(self):
-    f = open(datafile('small.png'), 'r')
+    f = open(datafile('small.png'), 'rb')
     upload = MediaIoBaseUpload(fd=f, mimetype='image/png')
 
     try:
@@ -231,9 +229,10 @@
     except NotImplementedError:
       pass
 
+  @unittest.skipIf(PY3, 'Strings and Bytes are different types')
   def test_media_io_base_upload_from_string_io(self):
-    f = open(datafile('small.png'), 'r')
-    fd = StringIO.StringIO(f.read())
+    f = open(datafile('small.png'), 'rb')
+    fd = StringIO(f.read())
     f.close()
 
     upload = MediaIoBaseUpload(
@@ -242,56 +241,36 @@
     self.assertEqual(190, upload.size())
     self.assertEqual(True, upload.resumable())
     self.assertEqual(500, upload.chunksize())
-    self.assertEqual('PNG', upload.getbytes(1, 3))
+    self.assertEqual(b'PNG', upload.getbytes(1, 3))
     f.close()
 
   def test_media_io_base_upload_from_bytes(self):
-    try:
-      import io
-
-      f = open(datafile('small.png'), 'r')
-      fd = io.BytesIO(f.read())
-      upload = MediaIoBaseUpload(
-          fd=fd, mimetype='image/png', chunksize=500, resumable=True)
-      self.assertEqual('image/png', upload.mimetype())
-      self.assertEqual(190, upload.size())
-      self.assertEqual(True, upload.resumable())
-      self.assertEqual(500, upload.chunksize())
-      self.assertEqual('PNG', upload.getbytes(1, 3))
-    except ImportError:
-      pass
+    f = open(datafile('small.png'), 'rb')
+    fd = BytesIO(f.read())
+    upload = MediaIoBaseUpload(
+        fd=fd, mimetype='image/png', chunksize=500, resumable=True)
+    self.assertEqual('image/png', upload.mimetype())
+    self.assertEqual(190, upload.size())
+    self.assertEqual(True, upload.resumable())
+    self.assertEqual(500, upload.chunksize())
+    self.assertEqual(b'PNG', upload.getbytes(1, 3))
 
   def test_media_io_base_upload_raises_on_invalid_chunksize(self):
-    try:
-      import io
-
-      f = open(datafile('small.png'), 'r')
-      fd = io.BytesIO(f.read())
-      self.assertRaises(InvalidChunkSizeError, MediaIoBaseUpload,
-          fd, 'image/png', chunksize=-2, resumable=True)
-    except ImportError:
-      pass
+    f = open(datafile('small.png'), 'rb')
+    fd = BytesIO(f.read())
+    self.assertRaises(InvalidChunkSizeError, MediaIoBaseUpload,
+        fd, 'image/png', chunksize=-2, resumable=True)
 
   def test_media_io_base_upload_streamable(self):
-    try:
-      import io
-
-      fd = io.BytesIO('stuff')
-      upload = MediaIoBaseUpload(
-          fd=fd, mimetype='image/png', chunksize=500, resumable=True)
-      self.assertEqual(True, upload.has_stream())
-      self.assertEqual(fd, upload.stream())
-    except ImportError:
-      pass
+    fd = BytesIO(b'stuff')
+    upload = MediaIoBaseUpload(
+        fd=fd, mimetype='image/png', chunksize=500, resumable=True)
+    self.assertEqual(True, upload.has_stream())
+    self.assertEqual(fd, upload.stream())
 
   def test_media_io_base_next_chunk_retries(self):
-    try:
-      import io
-    except ImportError:
-      return
-
-    f = open(datafile('small.png'), 'r')
-    fd = io.BytesIO(f.read())
+    f = open(datafile('small.png'), 'rb')
+    fd = BytesIO(f.read())
     upload = MediaIoBaseUpload(
         fd=fd, mimetype='image/png', chunksize=500, resumable=True)
 
@@ -333,14 +312,14 @@
     http = HttpMock(datafile('zoo.json'), {'status': '200'})
     zoo = build('zoo', 'v1', http=http)
     self.request = zoo.animals().get_media(name='Lion')
-    self.fd = StringIO.StringIO()
+    self.fd = BytesIO()
 
   def test_media_io_base_download(self):
     self.request.http = HttpMockSequence([
       ({'status': '200',
-        'content-range': '0-2/5'}, '123'),
+        'content-range': '0-2/5'}, b'123'),
       ({'status': '200',
-        'content-range': '3-4/5'}, '45'),
+        'content-range': '3-4/5'}, b'45'),
     ])
     self.assertEqual(True, self.request.http.follow_redirects)
 
@@ -356,7 +335,7 @@
 
     status, done = download.next_chunk()
 
-    self.assertEqual(self.fd.getvalue(), '123')
+    self.assertEqual(self.fd.getvalue(), b'123')
     self.assertEqual(False, done)
     self.assertEqual(3, download._progress)
     self.assertEqual(5, download._total_size)
@@ -364,7 +343,7 @@
 
     status, done = download.next_chunk()
 
-    self.assertEqual(self.fd.getvalue(), '12345')
+    self.assertEqual(self.fd.getvalue(), b'12345')
     self.assertEqual(True, done)
     self.assertEqual(5, download._progress)
     self.assertEqual(5, download._total_size)
@@ -372,9 +351,9 @@
   def test_media_io_base_download_handle_redirects(self):
     self.request.http = HttpMockSequence([
       ({'status': '200',
-        'content-location': 'https://secure.example.net/lion'}, ''),
+        'content-location': 'https://secure.example.net/lion'}, b''),
       ({'status': '200',
-        'content-range': '0-2/5'}, 'abc'),
+        'content-range': '0-2/5'}, b'abc'),
     ])
 
     download = MediaIoBaseDownload(
@@ -401,12 +380,12 @@
     # Even after raising an exception we can pick up where we left off.
     self.request.http = HttpMockSequence([
       ({'status': '200',
-        'content-range': '0-2/5'}, '123'),
+        'content-range': '0-2/5'}, b'123'),
     ])
 
     status, done = download.next_chunk()
 
-    self.assertEqual(self.fd.getvalue(), '123')
+    self.assertEqual(self.fd.getvalue(), b'123')
 
   def test_media_io_base_download_retries_5xx(self):
     self.request.http = HttpMockSequence([
@@ -414,12 +393,12 @@
       ({'status': '500'}, ''),
       ({'status': '500'}, ''),
       ({'status': '200',
-        'content-range': '0-2/5'}, '123'),
+        'content-range': '0-2/5'}, b'123'),
       ({'status': '503'}, ''),
       ({'status': '503'}, ''),
       ({'status': '503'}, ''),
       ({'status': '200',
-        'content-range': '3-4/5'}, '45'),
+        'content-range': '3-4/5'}, b'45'),
     ])
 
     download = MediaIoBaseDownload(
@@ -442,7 +421,7 @@
     # Check for exponential backoff using the rand function above.
     self.assertEqual([20, 40, 80], sleeptimes)
 
-    self.assertEqual(self.fd.getvalue(), '123')
+    self.assertEqual(self.fd.getvalue(), b'123')
     self.assertEqual(False, done)
     self.assertEqual(3, download._progress)
     self.assertEqual(5, download._total_size)
@@ -456,7 +435,7 @@
     # Check for exponential backoff using the rand function above.
     self.assertEqual([20, 40, 80], sleeptimes)
 
-    self.assertEqual(self.fd.getvalue(), '12345')
+    self.assertEqual(self.fd.getvalue(), b'12345')
     self.assertEqual(True, done)
     self.assertEqual(5, download._progress)
     self.assertEqual(5, download._total_size)
@@ -693,7 +672,7 @@
         None,
         'https://www.googleapis.com/someapi/v1/collection/?foo=bar',
         method='POST',
-        body='{}',
+        body=u'{}',
         headers={'content-type': 'application/json'},
         methodId=None,
         resumable=None)
@@ -702,7 +681,7 @@
 
   def test_serialize_request_media_body(self):
     batch = BatchHttpRequest()
-    f = open(datafile('small.png'))
+    f = open(datafile('small.png'), 'rb')
     body = f.read()
     f.close()
 
@@ -725,7 +704,7 @@
         None,
         'https://www.googleapis.com/someapi/v1/collection/?foo=bar',
         method='POST',
-        body='',
+        body=b'',
         headers={'content-type': 'application/json'},
         methodId=None,
         resumable=None)
@@ -940,7 +919,7 @@
     req = HttpRequest(
         http,
         _postproc,
-        'http://example.com?' + urllib.urlencode(query),
+        'http://example.com?' + urlencode(query),
         method='GET',
         body=None,
         headers={},
@@ -963,21 +942,21 @@
   """Test _StreamSlice."""
 
   def setUp(self):
-    self.stream = StringIO.StringIO('0123456789')
+    self.stream = BytesIO(b'0123456789')
 
   def test_read(self):
     s =  _StreamSlice(self.stream, 0, 4)
-    self.assertEqual('', s.read(0))
-    self.assertEqual('0', s.read(1))
-    self.assertEqual('123', s.read())
+    self.assertEqual(b'', s.read(0))
+    self.assertEqual(b'0', s.read(1))
+    self.assertEqual(b'123', s.read())
 
   def test_read_too_much(self):
     s =  _StreamSlice(self.stream, 1, 4)
-    self.assertEqual('1234', s.read(6))
+    self.assertEqual(b'1234', s.read(6))
 
   def test_read_all(self):
     s =  _StreamSlice(self.stream, 2, 1)
-    self.assertEqual('2', s.read(-1))
+    self.assertEqual(b'2', s.read(-1))
 
 
 class TestResponseCallback(unittest.TestCase):
diff --git a/tests/test_json_model.py b/tests/test_json_model.py
index 462038a..b198652 100644
--- a/tests/test_json_model.py
+++ b/tests/test_json_model.py
@@ -34,11 +34,7 @@
 from googleapiclient.errors import HttpError
 from googleapiclient.model import JsonModel
 
-# Python 2.5 requires different modules
-try:
-  from urlparse import parse_qs
-except ImportError:
-  from cgi import parse_qs
+from six.moves.urllib.parse import parse_qs
 
 
 class Model(unittest.TestCase):
@@ -125,7 +121,12 @@
 
     query_dict = parse_qs(query[1:])
     self.assertEqual(query_dict['foo'], ['1'])
-    self.assertEqual(query_dict['bar'], [u'\N{COMET}'.encode('utf-8')])
+    if six.PY3:
+      # Python 3, no need to encode
+      self.assertEqual(query_dict['bar'], [u'\N{COMET}'])
+    else:
+      # Python 2, encode string
+      self.assertEqual(query_dict['bar'], [u'\N{COMET}'.encode('utf-8')])
     self.assertEqual(query_dict['baz'], ['fe', 'fi', 'fo', 'fum'])
     self.assertTrue('qux' not in query_dict)
     self.assertEqual(body, '{}')
diff --git a/tests/test_protobuf_model.py b/tests/test_protobuf_model.py
index b47812f..465d120 100644
--- a/tests/test_protobuf_model.py
+++ b/tests/test_protobuf_model.py
@@ -29,11 +29,7 @@
 from googleapiclient.errors import HttpError
 from googleapiclient.model import ProtocolBufferModel
 
-# Python 2.5 requires different modules
-try:
-  from urlparse import parse_qs
-except ImportError:
-  from cgi import parse_qs
+from six.moves.urllib.parse import parse_qs
 
 
 class MockProtocolBuffer(object):
diff --git a/tests/test_schema.py b/tests/test_schema.py
index 8d07fed..c1216a5 100644
--- a/tests/test_schema.py
+++ b/tests/test_schema.py
@@ -20,7 +20,6 @@
 import json
 import os
 import unittest2 as unittest
-import StringIO
 
 from googleapiclient.schema import Schemas