fix: drop six dependency (#1452)
Fixes #1446 🦕
diff --git a/apiclient/__init__.py b/apiclient/__init__.py
index 8d9c4ec..abacd29 100644
--- a/apiclient/__init__.py
+++ b/apiclient/__init__.py
@@ -1,7 +1,5 @@
"""Retain apiclient as an alias for googleapiclient."""
-from six import iteritems
-
import googleapiclient
from googleapiclient import channel
@@ -32,5 +30,5 @@
import sys
-for module_name, module in iteritems(_SUBMODULES):
+for module_name, module in _SUBMODULES.items():
sys.modules["apiclient.%s" % module_name] = module
diff --git a/googleapiclient/_helpers.py b/googleapiclient/_helpers.py
index ddbd0e2..eb5e090 100644
--- a/googleapiclient/_helpers.py
+++ b/googleapiclient/_helpers.py
@@ -17,10 +17,7 @@
import functools
import inspect
import logging
-import warnings
-
-import six
-from six.moves import urllib
+import urllib.parse
logger = logging.getLogger(__name__)
@@ -135,7 +132,7 @@
return positional_wrapper
- if isinstance(max_positional_args, six.integer_types):
+ if isinstance(max_positional_args, int):
return positional_decorator
else:
args, _, _, defaults = inspect.getargspec(max_positional_args)
@@ -156,7 +153,7 @@
"""
urlencoded_params = urllib.parse.parse_qs(content)
params = {}
- for key, value in six.iteritems(urlencoded_params):
+ for key, value in urlencoded_params.items():
if len(value) != 1:
msg = "URL-encoded content contains a repeated value:" "%s -> %s" % (
key,
diff --git a/googleapiclient/channel.py b/googleapiclient/channel.py
index efff0f6..70af779 100644
--- a/googleapiclient/channel.py
+++ b/googleapiclient/channel.py
@@ -76,7 +76,6 @@
from googleapiclient import errors
from googleapiclient import _helpers as util
-import six
# The unix time epoch starts at midnight 1970.
@@ -104,7 +103,7 @@
def _upper_header_keys(headers):
new_headers = {}
- for k, v in six.iteritems(headers):
+ for k, v in headers.items():
new_headers[k.upper()] = v
return new_headers
@@ -244,7 +243,7 @@
Args:
resp: dict, The response from a watch() method.
"""
- for json_name, param_name in six.iteritems(CHANNEL_PARAMS):
+ for json_name, param_name in CHANNEL_PARAMS.items():
value = resp.get(json_name)
if value is not None:
setattr(self, param_name, value)
diff --git a/googleapiclient/discovery.py b/googleapiclient/discovery.py
index 3273899..1b7aedd 100644
--- a/googleapiclient/discovery.py
+++ b/googleapiclient/discovery.py
@@ -17,31 +17,26 @@
A client library for Google's discovery based APIs.
"""
from __future__ import absolute_import
-import six
__author__ = "jcgregorio@google.com (Joe Gregorio)"
__all__ = ["build", "build_from_document", "fix_method_name", "key2param"]
-from six.moves import http_client
-from six.moves.urllib.parse import urljoin
-
-
# Standard library imports
import copy
from collections import OrderedDict
-
-try:
- from email.generator import BytesGenerator
-except ImportError:
- from email.generator import Generator as BytesGenerator
+import collections.abc
+from email.generator import BytesGenerator
from email.mime.multipart import MIMEMultipart
from email.mime.nonmultipart import MIMENonMultipart
+import http.client as http_client
+import io
import json
import keyword
import logging
import mimetypes
import os
import re
+import urllib.parse
# Third-party imports
import httplib2
@@ -506,7 +501,7 @@
if client_options is None:
client_options = google.api_core.client_options.ClientOptions()
- if isinstance(client_options, six.moves.collections_abc.Mapping):
+ if isinstance(client_options, collections.abc.Mapping):
client_options = google.api_core.client_options.from_dict(client_options)
if http is not None:
@@ -519,9 +514,9 @@
if option is not None:
raise ValueError("Arguments http and {} are mutually exclusive".format(name))
- if isinstance(service, six.string_types):
+ if isinstance(service, str):
service = json.loads(service)
- elif isinstance(service, six.binary_type):
+ elif isinstance(service, bytes):
service = json.loads(service.decode("utf-8"))
if "rootUrl" not in service and isinstance(http, (HttpMock, HttpMockSequence)):
@@ -534,7 +529,7 @@
raise InvalidJsonError()
# If an API Endpoint is provided on client options, use that as the base URL
- base = urljoin(service["rootUrl"], service["servicePath"])
+ base = urllib.parse.urljoin(service["rootUrl"], service["servicePath"])
if client_options.api_endpoint:
base = client_options.api_endpoint
@@ -630,7 +625,7 @@
if "mtlsRootUrl" in service and (
not client_options or not client_options.api_endpoint
):
- mtls_endpoint = urljoin(service["mtlsRootUrl"], service["servicePath"])
+ mtls_endpoint = urllib.parse.urljoin(service["mtlsRootUrl"], service["servicePath"])
use_mtls_endpoint = os.getenv(GOOGLE_API_USE_MTLS_ENDPOINT, "auto")
if not use_mtls_endpoint in ("never", "auto", "always"):
@@ -759,7 +754,7 @@
parameters = method_desc.setdefault("parameters", {})
# Add in the parameters common to all methods.
- for name, description in six.iteritems(root_desc.get("parameters", {})):
+ for name, description in root_desc.get("parameters", {}).items():
parameters[name] = description
# Add in undocumented query parameters.
@@ -875,7 +870,7 @@
# exception here is the case of media uploads, where url will be an
# absolute url.
if url.startswith("http://") or url.startswith("https://"):
- return urljoin(base, url)
+ return urllib.parse.urljoin(base, url)
new_base = base if base.endswith("/") else base + "/"
new_url = url[1:] if url.startswith("/") else url
return new_base + new_url
@@ -943,7 +938,7 @@
"""
parameters = method_desc.get("parameters", {})
sorted_parameters = OrderedDict(sorted(parameters.items()))
- for arg, desc in six.iteritems(sorted_parameters):
+ for arg, desc in sorted_parameters.items():
param = key2param(arg)
self.argmap[param] = arg
@@ -997,9 +992,9 @@
def method(self, **kwargs):
# Don't bother with doc string, it will be over-written by createMethod.
- for name in six.iterkeys(kwargs):
+ for name in kwargs:
if name not in parameters.argmap:
- raise TypeError('Got an unexpected keyword argument "%s"' % name)
+                raise TypeError('Got an unexpected keyword argument "{}"'.format(name))
# Remove args that have a value of None.
keys = list(kwargs.keys())
@@ -1016,9 +1011,9 @@
):
raise TypeError('Missing required parameter "%s"' % name)
- for name, regex in six.iteritems(parameters.pattern_params):
+ for name, regex in parameters.pattern_params.items():
if name in kwargs:
- if isinstance(kwargs[name], six.string_types):
+ if isinstance(kwargs[name], str):
pvalues = [kwargs[name]]
else:
pvalues = kwargs[name]
@@ -1029,13 +1024,13 @@
% (name, pvalue, regex)
)
- for name, enums in six.iteritems(parameters.enum_params):
+ for name, enums in parameters.enum_params.items():
if name in kwargs:
# We need to handle the case of a repeated enum
# name differently, since we want to handle both
# arg='value' and arg=['value1', 'value2']
if name in parameters.repeated_params and not isinstance(
- kwargs[name], six.string_types
+ kwargs[name], str
):
values = kwargs[name]
else:
@@ -1049,7 +1044,7 @@
actual_query_params = {}
actual_path_params = {}
- for key, value in six.iteritems(kwargs):
+ for key, value in kwargs.items():
to_type = parameters.param_types.get(key, "string")
# For repeated parameters we cast each member of the list.
if key in parameters.repeated_params and type(value) == type([]):
@@ -1086,7 +1081,7 @@
if media_filename:
# Ensure we end up with a valid MediaUpload object.
- if isinstance(media_filename, six.string_types):
+ if isinstance(media_filename, str):
if media_mime_type is None:
logger.warning(
"media_mime_type argument not specified: trying to auto-detect for %s",
@@ -1144,7 +1139,7 @@
msgRoot.attach(msg)
# encode the body: note that we can't use `as_string`, because
# it plays games with `From ` lines.
- fp = six.BytesIO()
+ fp = io.BytesIO()
g = _BytesGenerator(fp, mangle_from_=False)
g.flatten(msgRoot, unixfrom=False)
body = fp.getvalue()
@@ -1218,7 +1213,7 @@
enumDesc = paramdesc.get("enumDescriptions", [])
if enum and enumDesc:
docs.append(" Allowed values\n")
- for (name, desc) in six.moves.zip(enum, enumDesc):
+ for (name, desc) in zip(enum, enumDesc):
docs.append(" %s - %s\n" % (name, desc))
if "response" in methodDesc:
if methodName.endswith("_media"):
@@ -1415,7 +1410,7 @@
# Add basic methods to Resource
if "methods" in resourceDesc:
- for methodName, methodDesc in six.iteritems(resourceDesc["methods"]):
+ for methodName, methodDesc in resourceDesc["methods"].items():
fixedMethodName, method = createMethod(
methodName, methodDesc, rootDesc, schema
)
@@ -1463,7 +1458,7 @@
return (methodName, methodResource)
- for methodName, methodDesc in six.iteritems(resourceDesc["resources"]):
+ for methodName, methodDesc in resourceDesc["resources"].items():
fixedMethodName, method = createResourceMethod(methodName, methodDesc)
self._set_dynamic_attr(
fixedMethodName, method.__get__(self, self.__class__)
@@ -1475,7 +1470,7 @@
# type either the method's request (query parameters) or request body.
if "methods" not in resourceDesc:
return
- for methodName, methodDesc in six.iteritems(resourceDesc["methods"]):
+ for methodName, methodDesc in resourceDesc["methods"].items():
nextPageTokenName = _findPageTokenName(
_methodProperties(methodDesc, schema, "response")
)
diff --git a/googleapiclient/http.py b/googleapiclient/http.py
index 0dd9c32..1b661e1 100644
--- a/googleapiclient/http.py
+++ b/googleapiclient/http.py
@@ -19,15 +19,13 @@
actual HTTP request.
"""
from __future__ import absolute_import
-import six
__author__ = "jcgregorio@google.com (Joe Gregorio)"
-from six import BytesIO, StringIO
-from six.moves.urllib.parse import urlparse, urlunparse, quote, unquote
-
import copy
import httplib2
+import http.client as http_client
+import io
import json
import logging
import mimetypes
@@ -35,6 +33,7 @@
import random
import socket
import time
+import urllib.parse
import uuid
# TODO(issue 221): Remove this conditional import jibbajabba.
@@ -76,11 +75,6 @@
_LEGACY_BATCH_URI = "https://www.googleapis.com/batch"
-if six.PY2:
- # That's a builtin python3 exception, nonexistent in python2.
- # Defined to None to avoid NameError while trying to catch it
- ConnectionError = None
-
def _should_retry_response(resp_status, content):
"""Determines whether a response should be retried.
@@ -104,7 +98,7 @@
# For 403 errors, we have to check for the `reason` in the response to
# determine if we should retry.
- if resp_status == six.moves.http_client.FORBIDDEN:
+ if resp_status == http_client.FORBIDDEN:
# If there's no details about the 403 type, don't retry.
if not content:
return False
@@ -175,7 +169,7 @@
resp = None
content = None
exception = None
- for retry_num in six.moves.range(num_retries + 1):
+ for retry_num in range(num_retries + 1):
if retry_num > 0:
# Sleep before retrying.
sleep_time = rand() * 2 ** retry_num
@@ -634,7 +628,7 @@
class MediaInMemoryUpload(MediaIoBaseUpload):
"""MediaUpload for a chunk of bytes.
- DEPRECATED: Use MediaIoBaseUpload with either io.TextIOBase or StringIO for
+ DEPRECATED: Use MediaIoBaseUpload with either io.TextIOBase or io.StringIO for
the stream.
"""
@@ -648,7 +642,7 @@
):
"""Create a new MediaInMemoryUpload.
- DEPRECATED: Use MediaIoBaseUpload with either io.TextIOBase or StringIO for
+ DEPRECATED: Use MediaIoBaseUpload with either io.TextIOBase or io.StringIO for
the stream.
Args:
@@ -660,7 +654,7 @@
resumable: bool, True if this is a resumable upload. False means upload
in a single request.
"""
- fd = BytesIO(body)
+ fd = io.BytesIO(body)
super(MediaInMemoryUpload, self).__init__(
fd, mimetype, chunksize=chunksize, resumable=resumable
)
@@ -710,7 +704,7 @@
self._rand = random.random
self._headers = {}
- for k, v in six.iteritems(request.headers):
+ for k, v in request.headers.items():
# allow users to supply custom headers by setting them on the request
# but strip out the ones that are set by default on requests generated by
# API methods like Drive's files().get(fileId=...)
@@ -917,8 +911,8 @@
self.method = "POST"
self.headers["x-http-method-override"] = "GET"
self.headers["content-type"] = "application/x-www-form-urlencoded"
- parsed = urlparse(self.uri)
- self.uri = urlunparse(
+ parsed = urllib.parse.urlparse(self.uri)
+ self.uri = urllib.parse.urlunparse(
(parsed.scheme, parsed.netloc, parsed.path, parsed.params, None, None)
)
self.body = parsed.query
@@ -1077,7 +1071,7 @@
size,
)
- for retry_num in six.moves.range(num_retries + 1):
+ for retry_num in range(num_retries + 1):
if retry_num > 0:
self._sleep(self._rand() * 2 ** retry_num)
LOGGER.warning(
@@ -1298,7 +1292,7 @@
# NB: we intentionally leave whitespace between base/id and '+', so RFC2822
# line folding works properly on Python 3; see
# https://github.com/googleapis/google-api-python-client/issues/164
- return "<%s + %s>" % (self._base_id, quote(id_))
+ return "<%s + %s>" % (self._base_id, urllib.parse.quote(id_))
def _header_to_id(self, header):
"""Convert a Content-ID header value to an id.
@@ -1321,7 +1315,7 @@
raise BatchError("Invalid value for Content-ID: %s" % header)
base, id_ = header[1:-1].split(" + ", 1)
- return unquote(id_)
+ return urllib.parse.unquote(id_)
def _serialize_request(self, request):
"""Convert an HttpRequest object into a string.
@@ -1333,8 +1327,8 @@
The request as a string in application/http format.
"""
# Construct status line
- parsed = urlparse(request.uri)
- request_line = urlunparse(
+ parsed = urllib.parse.urlparse(request.uri)
+ request_line = urllib.parse.urlunparse(
("", "", parsed.path, parsed.params, parsed.query, "")
)
status_line = request.method + " " + request_line + " HTTP/1.1\n"
@@ -1353,7 +1347,7 @@
if "content-type" in headers:
del headers["content-type"]
- for key, value in six.iteritems(headers):
+ for key, value in headers.items():
msg[key] = value
msg["Host"] = parsed.netloc
msg.set_unixfrom(None)
@@ -1363,7 +1357,7 @@
msg["content-length"] = str(len(request.body))
# Serialize the mime message.
- fp = StringIO()
+ fp = io.StringIO()
# maxheaderlen=0 means don't line wrap headers.
g = Generator(fp, maxheaderlen=0)
g.flatten(msg, unixfrom=False)
@@ -1488,7 +1482,7 @@
# encode the body: note that we can't use `as_string`, because
# it plays games with `From ` lines.
- fp = StringIO()
+ fp = io.StringIO()
g = Generator(fp, mangle_from_=False)
g.flatten(message, unixfrom=False)
body = fp.getvalue()
@@ -1509,8 +1503,7 @@
header = "content-type: %s\r\n\r\n" % resp["content-type"]
# PY3's FeedParser only accepts unicode. So we should decode content
# here, and encode each payload again.
- if six.PY3:
- content = content.decode("utf-8")
+ content = content.decode("utf-8")
for_parser = header + content
parser = FeedParser()
@@ -1526,7 +1519,7 @@
request_id = self._header_to_id(part["Content-ID"])
response, content = self._deserialize_response(part.get_payload())
# We encode content here to emulate normal http response.
- if isinstance(content, six.text_type):
+ if isinstance(content, str):
content = content.encode("utf-8")
self._responses[request_id] = (response, content)
@@ -1813,7 +1806,8 @@
# Remember the request so after the fact this mock can be examined
self.request_sequence.append((uri, method, body, headers))
resp, content = self._iterable.pop(0)
- content = six.ensure_binary(content)
+ if isinstance(content, str):
+ content = content.encode("utf-8")
if content == b"echo_request_headers":
content = headers
@@ -1826,7 +1820,7 @@
content = body
elif content == b"echo_request_uri":
content = uri
- if isinstance(content, six.text_type):
+ if isinstance(content, str):
content = content.encode("utf-8")
return httplib2.Response(resp), content
diff --git a/googleapiclient/mimeparse.py b/googleapiclient/mimeparse.py
index 6051628..a105667 100644
--- a/googleapiclient/mimeparse.py
+++ b/googleapiclient/mimeparse.py
@@ -23,7 +23,6 @@
"""
from __future__ import absolute_import
from functools import reduce
-import six
__version__ = "0.1.3"
__author__ = "Joe Gregorio"
@@ -105,7 +104,7 @@
lambda x, y: x + y,
[
1
- for (key, value) in six.iteritems(target_params)
+ for (key, value) in target_params.items()
if key != "q" and key in params and value == params[key]
],
0,
diff --git a/googleapiclient/model.py b/googleapiclient/model.py
index f58549c..b853a4f 100644
--- a/googleapiclient/model.py
+++ b/googleapiclient/model.py
@@ -20,7 +20,6 @@
object representation.
"""
from __future__ import absolute_import
-import six
__author__ = "jcgregorio@google.com (Joe Gregorio)"
@@ -28,8 +27,7 @@
import logging
import platform
import pkg_resources
-
-from six.moves.urllib.parse import urlencode
+import urllib.parse
from googleapiclient.errors import HttpError
@@ -112,11 +110,11 @@
if dump_request_response:
LOGGER.info("--request-start--")
LOGGER.info("-headers-start-")
- for h, v in six.iteritems(headers):
+ for h, v in headers.items():
LOGGER.info("%s: %s", h, v)
LOGGER.info("-headers-end-")
LOGGER.info("-path-parameters-start-")
- for h, v in six.iteritems(path_params):
+ for h, v in path_params.items():
LOGGER.info("%s: %s", h, v)
LOGGER.info("-path-parameters-end-")
LOGGER.info("body: %s", body)
@@ -175,22 +173,22 @@
if self.alt_param is not None:
params.update({"alt": self.alt_param})
astuples = []
- for key, value in six.iteritems(params):
+ for key, value in params.items():
if type(value) == type([]):
for x in value:
x = x.encode("utf-8")
astuples.append((key, x))
else:
- if isinstance(value, six.text_type) and callable(value.encode):
+ if isinstance(value, str) and callable(value.encode):
value = value.encode("utf-8")
astuples.append((key, value))
- return "?" + urlencode(astuples)
+ return "?" + urllib.parse.urlencode(astuples)
def _log_response(self, resp, content):
"""Logs debugging information about the response if requested."""
if dump_request_response:
LOGGER.info("--response-start--")
- for h, v in six.iteritems(resp):
+ for h, v in resp.items():
LOGGER.info("%s: %s", h, v)
if content:
LOGGER.info(content)
@@ -385,7 +383,7 @@
body=makepatch(original, item)).execute()
"""
patch = {}
- for key, original_value in six.iteritems(original):
+ for key, original_value in original.items():
modified_value = modified.get(key, None)
if modified_value is None:
# Use None to signal that the element is deleted
diff --git a/googleapiclient/schema.py b/googleapiclient/schema.py
index 00f8588..95767ef 100644
--- a/googleapiclient/schema.py
+++ b/googleapiclient/schema.py
@@ -57,7 +57,6 @@
The constructor takes a discovery document in which to look up named schema.
"""
from __future__ import absolute_import
-import six
# TODO(jcgregorio) support format, enum, minimum, maximum
@@ -255,7 +254,7 @@
if "properties" in schema:
properties = schema.get("properties", {})
sorted_properties = OrderedDict(sorted(properties.items()))
- for pname, pschema in six.iteritems(sorted_properties):
+ for pname, pschema in sorted_properties.items():
self.emitBegin('"%s": ' % pname)
self._to_str_impl(pschema)
elif "additionalProperties" in schema:
diff --git a/samples/compute/create_instance.py b/samples/compute/create_instance.py
index 3ccffc5..f980769 100644
--- a/samples/compute/create_instance.py
+++ b/samples/compute/create_instance.py
@@ -27,7 +27,6 @@
import time
import googleapiclient.discovery
-from six.moves import input
# [START compute_apiary_list_instances]
diff --git a/setup.py b/setup.py
index 60bddf1..76d65ed 100644
--- a/setup.py
+++ b/setup.py
@@ -42,7 +42,6 @@
# Until this issue is closed
# https://github.com/googleapis/google-cloud-python/issues/10566
"google-api-core>=1.21.0,<3.0.0dev",
- "six>=1.13.0,<2dev",
"uritemplate>=3.0.0,<4dev",
]
diff --git a/testing/constraints-3.6.txt b/testing/constraints-3.6.txt
index ccff147..0c0e7a2 100644
--- a/testing/constraints-3.6.txt
+++ b/testing/constraints-3.6.txt
@@ -9,5 +9,4 @@
google-auth==1.16.0
google-auth-httplib2==0.0.3
google-api-core==1.21.0
-six==1.13.0
uritemplate==3.0.0
\ No newline at end of file
diff --git a/tests/test__helpers.py b/tests/test__helpers.py
index 90c75ef..ab0bd4b 100644
--- a/tests/test__helpers.py
+++ b/tests/test__helpers.py
@@ -15,12 +15,9 @@
"""Unit tests for googleapiclient._helpers."""
import unittest
+import urllib.parse
import mock
-
-import six
-from six.moves import urllib
-
from googleapiclient import _helpers
diff --git a/tests/test_discovery.py b/tests/test_discovery.py
index 1202b2d..9500fbf 100644
--- a/tests/test_discovery.py
+++ b/tests/test_discovery.py
@@ -21,16 +21,14 @@
Unit tests for objects created from discovery documents.
"""
from __future__ import absolute_import
-import six
__author__ = "jcgregorio@google.com (Joe Gregorio)"
-from six import BytesIO, StringIO
-from six.moves.urllib.parse import urlparse, parse_qs
-
+from collections import defaultdict
import copy
import datetime
import httplib2
+import io
import itertools
import json
import os
@@ -38,7 +36,7 @@
import re
import sys
import unittest2 as unittest
-from collections import defaultdict
+import urllib.parse
from parameterized import parameterized
import mock
@@ -98,15 +96,15 @@
def assertUrisEqual(testcase, expected, actual):
"""Test that URIs are the same, up to reordering of query parameters."""
- expected = urlparse(expected)
- actual = urlparse(actual)
+ expected = urllib.parse.urlparse(expected)
+ actual = urllib.parse.urlparse(actual)
testcase.assertEqual(expected.scheme, actual.scheme)
testcase.assertEqual(expected.netloc, actual.netloc)
testcase.assertEqual(expected.path, actual.path)
testcase.assertEqual(expected.params, actual.params)
testcase.assertEqual(expected.fragment, actual.fragment)
- expected_query = parse_qs(expected.query)
- actual_query = parse_qs(actual.query)
+ expected_query = urllib.parse.parse_qs(expected.query)
+ actual_query = urllib.parse.parse_qs(actual.query)
for name in list(expected_query.keys()):
testcase.assertEqual(expected_query[name], actual_query[name])
for name in list(actual_query.keys()):
@@ -175,7 +173,7 @@
STACK_QUERY_PARAMETER_DEFAULT_VALUE, parameters[param_name]
)
- for param_name, value in six.iteritems(root_desc.get("parameters", {})):
+ for param_name, value in root_desc.get("parameters", {}).items():
self.assertEqual(value, parameters[param_name])
return parameters
@@ -1271,8 +1269,8 @@
self.assertTrue("unexpected" in str(e))
def _check_query_types(self, request):
- parsed = urlparse(request.uri)
- q = parse_qs(parsed[4])
+ parsed = urllib.parse.urlparse(request.uri)
+ q = urllib.parse.parse_qs(parsed.query)
self.assertEqual(q["q"], ["foo"])
self.assertEqual(q["i"], ["1"])
self.assertEqual(q["n"], ["1.0"])
@@ -1319,8 +1317,8 @@
zoo = build("zoo", "v1", http=http, static_discovery=False)
request = zoo.query(trace="html", fields="description")
- parsed = urlparse(request.uri)
- q = parse_qs(parsed[4])
+ parsed = urllib.parse.urlparse(request.uri)
+ q = urllib.parse.parse_qs(parsed.query)
self.assertEqual(q["trace"], ["html"])
self.assertEqual(q["fields"], ["description"])
@@ -1329,8 +1327,8 @@
zoo = build("zoo", "v1", http=http, static_discovery=False)
request = zoo.query(trace=None, fields="description")
- parsed = urlparse(request.uri)
- q = parse_qs(parsed[4])
+ parsed = urllib.parse.urlparse(request.uri)
+ q = urllib.parse.parse_qs(parsed.query)
self.assertFalse("trace" in q)
def test_model_added_query_parameters(self):
@@ -1338,8 +1336,8 @@
zoo = build("zoo", "v1", http=http, static_discovery=False)
request = zoo.animals().get(name="Lion")
- parsed = urlparse(request.uri)
- q = parse_qs(parsed[4])
+ parsed = urllib.parse.urlparse(request.uri)
+ q = urllib.parse.parse_qs(parsed.query)
self.assertEqual(q["alt"], ["json"])
self.assertEqual(request.headers["accept"], "application/json")
@@ -1348,8 +1346,8 @@
zoo = build("zoo", "v1", http=http, static_discovery=False)
request = zoo.animals().getmedia(name="Lion")
- parsed = urlparse(request.uri)
- q = parse_qs(parsed[4])
+ parsed = urllib.parse.urlparse(request.uri)
+ q = urllib.parse.parse_qs(parsed.query)
self.assertTrue("alt" not in q)
self.assertEqual(request.headers["accept"], "*/*")
@@ -1426,8 +1424,8 @@
self.assertTrue(getattr(zoo, "animals"))
request = zoo.animals().list(name="bat", projection="full")
- parsed = urlparse(request.uri)
- q = parse_qs(parsed[4])
+ parsed = urllib.parse.urlparse(request.uri)
+ q = urllib.parse.parse_qs(parsed.query)
self.assertEqual(q["name"], ["bat"])
self.assertEqual(q["projection"], ["full"])
@@ -1436,26 +1434,17 @@
zoo = build("zoo", "v1", http=self.http, static_discovery=False)
self.assertTrue(getattr(zoo, "animals"))
request = zoo.my().favorites().list(max_results="5")
- parsed = urlparse(request.uri)
- q = parse_qs(parsed[4])
+ parsed = urllib.parse.urlparse(request.uri)
+ q = urllib.parse.parse_qs(parsed.query)
self.assertEqual(q["max-results"], ["5"])
- @unittest.skipIf(six.PY3, "print is not a reserved name in Python 3")
- def test_methods_with_reserved_names(self):
- self.http = HttpMock(datafile("zoo.json"), {"status": "200"})
- zoo = build("zoo", "v1", http=self.http)
- self.assertTrue(getattr(zoo, "animals"))
- request = zoo.global_().print_().assert_(max_results="5")
- parsed = urlparse(request.uri)
- self.assertEqual(parsed[2], "/zoo/v1/global/print/assert")
-
def test_top_level_functions(self):
self.http = HttpMock(datafile("zoo.json"), {"status": "200"})
zoo = build("zoo", "v1", http=self.http, static_discovery=False)
self.assertTrue(getattr(zoo, "query"))
request = zoo.query(q="foo")
- parsed = urlparse(request.uri)
- q = parse_qs(parsed[4])
+ parsed = urllib.parse.urlparse(request.uri)
+ q = urllib.parse.parse_qs(parsed.query)
self.assertEqual(q["q"], ["foo"])
def test_simple_media_uploads(self):
@@ -1808,7 +1797,7 @@
zoo = build("zoo", "v1", http=self.http, static_discovery=False)
# Set up a seekable stream and try to upload in single chunk.
- fd = BytesIO(b'01234"56789"')
+ fd = io.BytesIO(b'01234"56789"')
media_upload = MediaIoBaseUpload(
fd=fd, mimetype="text/plain", chunksize=-1, resumable=True
)
@@ -1839,7 +1828,7 @@
zoo = build("zoo", "v1", http=self.http, static_discovery=False)
# Set up a seekable stream and try to upload in chunks.
- fd = BytesIO(b"0123456789")
+ fd = io.BytesIO(b"0123456789")
media_upload = MediaIoBaseUpload(
fd=fd, mimetype="text/plain", chunksize=5, resumable=True
)
@@ -1950,7 +1939,7 @@
self.http = HttpMock(datafile("zoo.json"), {"status": "200"})
zoo = build("zoo", "v1", http=self.http, static_discovery=False)
- fd = BytesIO(b"data goes here")
+ fd = io.BytesIO(b"data goes here")
# Create an upload that doesn't know the full size of the media.
upload = MediaIoBaseUpload(
@@ -1975,7 +1964,7 @@
zoo = build("zoo", "v1", http=self.http, static_discovery=False)
# Create an upload that doesn't know the full size of the media.
- fd = BytesIO(b"data goes here")
+ fd = io.BytesIO(b"data goes here")
upload = MediaIoBaseUpload(
fd=fd, mimetype="image/png", chunksize=500, resumable=True
@@ -2152,8 +2141,8 @@
tasks = build("tasks", "v1", http=self.http)
request = tasks.tasklists().list()
next_request = tasks.tasklists().list_next(request, {"nextPageToken": "123abc"})
- parsed = list(urlparse(next_request.uri))
- q = parse_qs(parsed[4])
+ parsed = urllib.parse.urlparse(next_request.uri)
+ q = urllib.parse.parse_qs(parsed.query)
self.assertEqual(q["pageToken"][0], "123abc")
def test_next_successful_with_next_page_token_alternate_name(self):
@@ -2161,8 +2150,8 @@
bigquery = build("bigquery", "v2", http=self.http)
request = bigquery.tabledata().list(datasetId="", projectId="", tableId="")
next_request = bigquery.tabledata().list_next(request, {"pageToken": "123abc"})
- parsed = list(urlparse(next_request.uri))
- q = parse_qs(parsed[4])
+ parsed = urllib.parse.urlparse(next_request.uri)
+ q = urllib.parse.parse_qs(parsed.query)
self.assertEqual(q["pageToken"][0], "123abc")
def test_next_successful_with_next_page_token_in_body(self):
@@ -2192,8 +2181,8 @@
drive = build("drive", "v3", http=self.http)
request = drive.changes().list(pageToken="startPageToken")
next_request = drive.changes().list_next(request, {"nextPageToken": "123abc"})
- parsed = list(urlparse(next_request.uri))
- q = parse_qs(parsed[4])
+ parsed = urllib.parse.urlparse(next_request.uri)
+ q = urllib.parse.parse_qs(parsed.query)
self.assertEqual(q["pageToken"][0], "123abc")
@@ -2203,8 +2192,8 @@
zoo = build("zoo", "v1", http=http, static_discovery=False)
request = zoo.animals().get_media(name="Lion")
- parsed = urlparse(request.uri)
- q = parse_qs(parsed[4])
+ parsed = urllib.parse.urlparse(request.uri)
+ q = urllib.parse.parse_qs(parsed.query)
self.assertEqual(q["alt"], ["media"])
self.assertEqual(request.headers["accept"], "*/*")
diff --git a/tests/test_http.py b/tests/test_http.py
index bfd9ba8..5484269 100644
--- a/tests/test_http.py
+++ b/tests/test_http.py
@@ -19,14 +19,10 @@
Unit tests for the googleapiclient.http.
"""
from __future__ import absolute_import
-from six.moves import range
__author__ = "jcgregorio@google.com (Joe Gregorio)"
-from six import PY3
-from six import BytesIO, StringIO
from io import FileIO
-from six.moves.urllib.parse import urlencode
# Do not remove the httplib2 import
import json
@@ -36,6 +32,7 @@
import mock
import os
import unittest2 as unittest
+import urllib.parse
import random
import socket
import ssl
@@ -132,7 +129,7 @@
def request(self, *args, **kwargs):
if not self.num_errors:
return httplib2.Response(self.success_json), self.success_data
- elif self.num_errors == 5 and PY3:
+ elif self.num_errors == 5:
ex = ConnectionResetError # noqa: F821
elif self.num_errors == 4:
ex = httplib2.ServerNotFoundError()
@@ -149,11 +146,7 @@
ex.errno = socket.errno.WSAETIMEDOUT
except AttributeError:
# For Linux/Mac:
- if PY3:
- ex = socket.timeout()
- else:
- ex = OSError()
- ex.errno = socket.errno.ETIMEDOUT
+ ex = socket.timeout()
self.num_errors -= 1
raise ex
@@ -214,12 +207,8 @@
def test_media_file_upload_closes_fd_in___del__(self):
file_desc = mock.Mock(spec=io.TextIOWrapper)
opener = mock.mock_open(file_desc)
- if PY3:
- with mock.patch("builtins.open", return_value=opener):
- upload = MediaFileUpload(datafile("test_close"), mimetype="text/plain")
- else:
- with mock.patch("__builtin__.open", return_value=opener):
- upload = MediaFileUpload(datafile("test_close"), mimetype="text/plain")
+ with mock.patch("builtins.open", return_value=opener):
+ upload = MediaFileUpload(datafile("test_close"), mimetype="text/plain")
self.assertIs(upload.stream(), file_desc)
del upload
file_desc.close.assert_called_once_with()
@@ -338,25 +327,10 @@
except NotImplementedError:
pass
- @unittest.skipIf(PY3, "Strings and Bytes are different types")
- def test_media_io_base_upload_from_string_io(self):
- f = open(datafile("small.png"), "rb")
- fd = StringIO(f.read())
- f.close()
-
- upload = MediaIoBaseUpload(
- fd=fd, mimetype="image/png", chunksize=500, resumable=True
- )
- self.assertEqual("image/png", upload.mimetype())
- self.assertEqual(190, upload.size())
- self.assertEqual(True, upload.resumable())
- self.assertEqual(500, upload.chunksize())
- self.assertEqual(b"PNG", upload.getbytes(1, 3))
- f.close()
def test_media_io_base_upload_from_bytes(self):
f = open(datafile("small.png"), "rb")
- fd = BytesIO(f.read())
+ fd = io.BytesIO(f.read())
upload = MediaIoBaseUpload(
fd=fd, mimetype="image/png", chunksize=500, resumable=True
)
@@ -368,7 +342,7 @@
def test_media_io_base_upload_raises_on_invalid_chunksize(self):
f = open(datafile("small.png"), "rb")
- fd = BytesIO(f.read())
+ fd = io.BytesIO(f.read())
self.assertRaises(
InvalidChunkSizeError,
MediaIoBaseUpload,
@@ -379,7 +353,7 @@
)
def test_media_io_base_upload_streamable(self):
- fd = BytesIO(b"stuff")
+ fd = io.BytesIO(b"stuff")
upload = MediaIoBaseUpload(
fd=fd, mimetype="image/png", chunksize=500, resumable=True
)
@@ -388,7 +362,7 @@
def test_media_io_base_next_chunk_retries(self):
f = open(datafile("small.png"), "rb")
- fd = BytesIO(f.read())
+ fd = io.BytesIO(f.read())
upload = MediaIoBaseUpload(
fd=fd, mimetype="image/png", chunksize=500, resumable=True
)
@@ -423,7 +397,7 @@
self.assertEqual([20, 40, 80, 20, 40, 80], sleeptimes)
def test_media_io_base_next_chunk_no_retry_403_not_configured(self):
- fd = BytesIO(b"i am png")
+ fd = io.BytesIO(b"i am png")
upload = MediaIoBaseUpload(
fd=fd, mimetype="image/png", chunksize=500, resumable=True
)
@@ -448,7 +422,7 @@
def test_media_io_base_empty_file(self):
- fd = BytesIO()
+ fd = io.BytesIO()
upload = MediaIoBaseUpload(
fd=fd, mimetype="image/png", chunksize=500, resumable=True
)
@@ -479,7 +453,7 @@
http = HttpMock(datafile("zoo.json"), {"status": "200"})
zoo = build("zoo", "v1", http=http, static_discovery=False)
self.request = zoo.animals().get_media(name="Lion")
- self.fd = BytesIO()
+ self.fd = io.BytesIO()
def test_media_io_base_download(self):
self.request.http = HttpMockSequence(
@@ -544,7 +518,7 @@
self.assertEqual(result.get("Cache-Control"), "no-store")
- download._fd = self.fd = BytesIO()
+ download._fd = self.fd = io.BytesIO()
status, done = download.next_chunk()
result = json.loads(self.fd.getvalue().decode("utf-8"))
@@ -974,7 +948,7 @@
def test_retry_connection_errors_resumable(self):
with open(datafile("small.png"), "rb") as small_png_file:
- small_png_fd = BytesIO(small_png_file.read())
+ small_png_fd = io.BytesIO(small_png_file.read())
upload = MediaIoBaseUpload(
fd=small_png_fd, mimetype="image/png", chunksize=500, resumable=True
)
@@ -1609,7 +1583,7 @@
req = HttpRequest(
http,
_postproc,
- "http://example.com?" + urlencode(query),
+ "http://example.com?" + urllib.parse.urlencode(query),
method="GET",
body=None,
headers={},
@@ -1632,7 +1606,7 @@
"""Test _StreamSlice."""
def setUp(self):
- self.stream = BytesIO(b"0123456789")
+ self.stream = io.BytesIO(b"0123456789")
def test_read(self):
s = _StreamSlice(self.stream, 0, 4)
diff --git a/tests/test_json_model.py b/tests/test_json_model.py
index 6857803..416b7be 100644
--- a/tests/test_json_model.py
+++ b/tests/test_json_model.py
@@ -19,24 +19,22 @@
Unit tests for the JSON model.
"""
from __future__ import absolute_import
-import six
__author__ = "jcgregorio@google.com (Joe Gregorio)"
-import copy
+import httplib2
import json
-import os
import pkg_resources
import platform
import unittest2 as unittest
-import httplib2
+import urllib.parse
+
import googleapiclient.model
+
from googleapiclient.errors import HttpError
from googleapiclient.model import JsonModel
-from six.moves.urllib.parse import parse_qs
-
_LIBRARY_VERSION = pkg_resources.get_distribution("google-api-python-client").version
@@ -130,14 +128,9 @@
self.assertEqual(headers["accept"], "application/json")
self.assertEqual(headers["content-type"], "application/json")
- query_dict = parse_qs(query[1:])
+ query_dict = urllib.parse.parse_qs(query[1:])
self.assertEqual(query_dict["foo"], ["1"])
- if six.PY3:
- # Python 3, no need to encode
- self.assertEqual(query_dict["bar"], [u"\N{COMET}"])
- else:
- # Python 2, encode string
- self.assertEqual(query_dict["bar"], [u"\N{COMET}".encode("utf-8")])
+ self.assertEqual(query_dict["bar"], [u"\N{COMET}"])
self.assertEqual(query_dict["baz"], ["fe", "fi", "fo", "fum"])
self.assertTrue("qux" not in query_dict)
self.assertEqual(body, "{}")
@@ -250,7 +243,7 @@
def __init__(self, items):
super(MockResponse, self).__init__()
self.status = items["status"]
- for key, value in six.iteritems(items):
+ for key, value in items.items():
self[key] = value
old_logging = googleapiclient.model.LOGGER
diff --git a/tests/test_protobuf_model.py b/tests/test_protobuf_model.py
index 78caf4e..4479b13 100644
--- a/tests/test_protobuf_model.py
+++ b/tests/test_protobuf_model.py
@@ -24,13 +24,9 @@
import unittest2 as unittest
import httplib2
-import googleapiclient.model
-from googleapiclient.errors import HttpError
from googleapiclient.model import ProtocolBufferModel
-from six.moves.urllib.parse import parse_qs
-
class MockProtocolBuffer(object):
def __init__(self, data=None):