Update urllib-related imports and usage to use six.moves for Python 2/3 compatibility.
diff --git a/googleapiclient/discovery.py b/googleapiclient/discovery.py
index 56d182e..a6180f8 100644
--- a/googleapiclient/discovery.py
+++ b/googleapiclient/discovery.py
@@ -29,6 +29,8 @@
]
from six import StringIO
+from six.moves.urllib.parse import urlencode, urlparse, urljoin, \
+ urlunparse, parse_qsl
# Standard library imports
import copy
@@ -41,13 +43,6 @@
import mimetypes
import os
import re
-import urllib
-import urlparse
-
-try:
- from urlparse import parse_qsl
-except ImportError:
- from cgi import parse_qsl
# Third-party imports
import httplib2
@@ -258,7 +253,7 @@
if isinstance(service, six.string_types):
service = json.loads(service)
- base = urlparse.urljoin(service['rootUrl'], service['servicePath'])
+ base = urljoin(service['rootUrl'], service['servicePath'])
schema = Schemas(service)
if credentials:
@@ -505,7 +500,7 @@
# exception here is the case of media uploads, where url will be an
# absolute url.
if url.startswith('http://') or url.startswith('https://'):
- return urlparse.urljoin(base, url)
+ return urljoin(base, url)
new_base = base if base.endswith('/') else base + '/'
new_url = url[1:] if url.startswith('/') else url
return new_base + new_url
@@ -859,14 +854,14 @@
request = copy.copy(previous_request)
pageToken = previous_response['nextPageToken']
- parsed = list(urlparse.urlparse(request.uri))
+ parsed = list(urlparse(request.uri))
q = parse_qsl(parsed[4])
# Find and remove old 'pageToken' value from URI
newq = [(key, value) for (key, value) in q if key != 'pageToken']
newq.append(('pageToken', pageToken))
- parsed[4] = urllib.urlencode(newq)
- uri = urlparse.urlunparse(parsed)
+ parsed[4] = urlencode(newq)
+ uri = urlunparse(parsed)
request.uri = uri
diff --git a/googleapiclient/http.py b/googleapiclient/http.py
index 64d3f3c..160cb45 100644
--- a/googleapiclient/http.py
+++ b/googleapiclient/http.py
@@ -25,6 +25,7 @@
__author__ = 'jcgregorio@google.com (Joe Gregorio)'
from six import BytesIO, StringIO
+from six.moves.urllib.parse import urlparse, urlunparse, quote, unquote
import base64
import copy
@@ -38,8 +39,6 @@
import random
import sys
import time
-import urllib
-import urlparse
import uuid
from email.generator import Generator
@@ -703,8 +702,8 @@
self.method = 'POST'
self.headers['x-http-method-override'] = 'GET'
self.headers['content-type'] = 'application/x-www-form-urlencoded'
- parsed = urlparse.urlparse(self.uri)
- self.uri = urlparse.urlunparse(
+ parsed = urlparse(self.uri)
+ self.uri = urlunparse(
(parsed.scheme, parsed.netloc, parsed.path, parsed.params, None,
None)
)
@@ -1052,7 +1051,7 @@
if self._base_id is None:
self._base_id = uuid.uuid4()
- return '<%s+%s>' % (self._base_id, urllib.quote(id_))
+ return '<%s+%s>' % (self._base_id, quote(id_))
def _header_to_id(self, header):
"""Convert a Content-ID header value to an id.
@@ -1075,7 +1074,7 @@
raise BatchError("Invalid value for Content-ID: %s" % header)
base, id_ = header[1:-1].rsplit('+', 1)
- return urllib.unquote(id_)
+ return unquote(id_)
def _serialize_request(self, request):
"""Convert an HttpRequest object into a string.
@@ -1087,8 +1086,8 @@
The request as a string in application/http format.
"""
# Construct status line
- parsed = urlparse.urlparse(request.uri)
- request_line = urlparse.urlunparse(
+ parsed = urlparse(request.uri)
+ request_line = urlunparse(
(None, None, parsed.path, parsed.params, parsed.query, None)
)
status_line = request.method + ' ' + request_line + ' HTTP/1.1\n'
diff --git a/googleapiclient/model.py b/googleapiclient/model.py
index 9be5a59..2402d84 100644
--- a/googleapiclient/model.py
+++ b/googleapiclient/model.py
@@ -26,7 +26,8 @@
import json
import logging
-import urllib
+
+from six.moves.urllib.parse import urlencode
from googleapiclient import __version__
from .errors import HttpError
@@ -170,7 +171,7 @@
if isinstance(value, six.text_type) and callable(value.encode):
value = value.encode('utf-8')
astuples.append((key, value))
- return '?' + urllib.urlencode(astuples)
+ return '?' + urlencode(astuples)
def _log_response(self, resp, content):
"""Logs debugging information about the response if requested."""
diff --git a/tests/test_discovery.py b/tests/test_discovery.py
index 70bd248..0a71cfc 100644
--- a/tests/test_discovery.py
+++ b/tests/test_discovery.py
@@ -26,6 +26,7 @@
__author__ = 'jcgregorio@google.com (Joe Gregorio)'
from six import BytesIO, StringIO
+from six.moves.urllib.parse import urlparse, parse_qs
import copy
import datetime
@@ -36,14 +37,6 @@
import pickle
import sys
import unittest2 as unittest
-import urlparse
-
-
-try:
- from urlparse import parse_qs
-except ImportError:
- from cgi import parse_qs
-
from googleapiclient.discovery import _fix_up_media_upload
from googleapiclient.discovery import _fix_up_method_description
@@ -83,8 +76,8 @@
def assertUrisEqual(testcase, expected, actual):
"""Test that URIs are the same, up to reordering of query parameters."""
- expected = urlparse.urlparse(expected)
- actual = urlparse.urlparse(actual)
+ expected = urlparse(expected)
+ actual = urlparse(actual)
testcase.assertEqual(expected.scheme, actual.scheme)
testcase.assertEqual(expected.netloc, actual.netloc)
testcase.assertEqual(expected.path, actual.path)
@@ -443,7 +436,7 @@
self.assertTrue('unexpected' in str(e))
def _check_query_types(self, request):
- parsed = urlparse.urlparse(request.uri)
+ parsed = urlparse(request.uri)
q = parse_qs(parsed[4])
self.assertEqual(q['q'], ['foo'])
self.assertEqual(q['i'], ['1'])
@@ -480,7 +473,7 @@
zoo = build('zoo', 'v1', http=http)
request = zoo.query(trace='html', fields='description')
- parsed = urlparse.urlparse(request.uri)
+ parsed = urlparse(request.uri)
q = parse_qs(parsed[4])
self.assertEqual(q['trace'], ['html'])
self.assertEqual(q['fields'], ['description'])
@@ -490,7 +483,7 @@
zoo = build('zoo', 'v1', http=http)
request = zoo.query(trace=None, fields='description')
- parsed = urlparse.urlparse(request.uri)
+ parsed = urlparse(request.uri)
q = parse_qs(parsed[4])
self.assertFalse('trace' in q)
@@ -499,7 +492,7 @@
zoo = build('zoo', 'v1', http=http)
request = zoo.animals().get(name='Lion')
- parsed = urlparse.urlparse(request.uri)
+ parsed = urlparse(request.uri)
q = parse_qs(parsed[4])
self.assertEqual(q['alt'], ['json'])
self.assertEqual(request.headers['accept'], 'application/json')
@@ -509,7 +502,7 @@
zoo = build('zoo', 'v1', http=http)
request = zoo.animals().getmedia(name='Lion')
- parsed = urlparse.urlparse(request.uri)
+ parsed = urlparse(request.uri)
q = parse_qs(parsed[4])
self.assertTrue('alt' not in q)
self.assertEqual(request.headers['accept'], '*/*')
@@ -561,7 +554,7 @@
self.assertTrue(getattr(zoo, 'animals'))
request = zoo.animals().list(name='bat', projection="full")
- parsed = urlparse.urlparse(request.uri)
+ parsed = urlparse(request.uri)
q = parse_qs(parsed[4])
self.assertEqual(q['name'], ['bat'])
self.assertEqual(q['projection'], ['full'])
@@ -571,7 +564,7 @@
zoo = build('zoo', 'v1', http=self.http)
self.assertTrue(getattr(zoo, 'animals'))
request = zoo.my().favorites().list(max_results="5")
- parsed = urlparse.urlparse(request.uri)
+ parsed = urlparse(request.uri)
q = parse_qs(parsed[4])
self.assertEqual(q['max-results'], ['5'])
@@ -580,7 +573,7 @@
zoo = build('zoo', 'v1', http=self.http)
self.assertTrue(getattr(zoo, 'animals'))
request = zoo.global_().print_().assert_(max_results="5")
- parsed = urlparse.urlparse(request.uri)
+ parsed = urlparse(request.uri)
self.assertEqual(parsed[2], '/zoo/v1/global/print/assert')
def test_top_level_functions(self):
@@ -588,7 +581,7 @@
zoo = build('zoo', 'v1', http=self.http)
self.assertTrue(getattr(zoo, 'query'))
request = zoo.query(q="foo")
- parsed = urlparse.urlparse(request.uri)
+ parsed = urlparse(request.uri)
q = parse_qs(parsed[4])
self.assertEqual(q['q'], ['foo'])
@@ -1166,7 +1159,7 @@
request = tasks.tasklists().list()
next_request = tasks.tasklists().list_next(
request, {'nextPageToken': '123abc'})
- parsed = list(urlparse.urlparse(next_request.uri))
+ parsed = list(urlparse(next_request.uri))
q = parse_qs(parsed[4])
self.assertEqual(q['pageToken'][0], '123abc')
@@ -1183,7 +1176,7 @@
zoo = build('zoo', 'v1', http=http)
request = zoo.animals().get_media(name='Lion')
- parsed = urlparse.urlparse(request.uri)
+ parsed = urlparse(request.uri)
q = parse_qs(parsed[4])
self.assertEqual(q['alt'], ['media'])
self.assertEqual(request.headers['accept'], '*/*')
diff --git a/tests/test_http.py b/tests/test_http.py
index 0608c92..bb21335 100644
--- a/tests/test_http.py
+++ b/tests/test_http.py
@@ -25,13 +25,13 @@
from six import BytesIO, StringIO
from io import FileIO
+from six.moves.urllib.parse import urlencode
# Do not remove the httplib2 import
import httplib2
import logging
import os
import unittest2 as unittest
-import urllib
import random
import time
@@ -917,7 +917,7 @@
req = HttpRequest(
http,
_postproc,
- 'http://example.com?' + urllib.urlencode(query),
+ 'http://example.com?' + urlencode(query),
method='GET',
body=None,
headers={},
diff --git a/tests/test_json_model.py b/tests/test_json_model.py
index 462038a..c4c3901 100644
--- a/tests/test_json_model.py
+++ b/tests/test_json_model.py
@@ -34,11 +34,7 @@
from googleapiclient.errors import HttpError
from googleapiclient.model import JsonModel
-# Python 2.5 requires different modules
-try:
- from urlparse import parse_qs
-except ImportError:
- from cgi import parse_qs
+from six.moves.urllib.parse import parse_qs
class Model(unittest.TestCase):
diff --git a/tests/test_protobuf_model.py b/tests/test_protobuf_model.py
index b47812f..465d120 100644
--- a/tests/test_protobuf_model.py
+++ b/tests/test_protobuf_model.py
@@ -29,11 +29,7 @@
from googleapiclient.errors import HttpError
from googleapiclient.model import ProtocolBufferModel
-# Python 2.5 requires different modules
-try:
- from urlparse import parse_qs
-except ImportError:
- from cgi import parse_qs
+from six.moves.urllib.parse import parse_qs
class MockProtocolBuffer(object):