fix: drop six dependency (#1452)

Replaces the remaining `six` usages in the test suite with their Python 3 standard-library equivalents (`urllib.parse`, `io`, plain `dict.items()`) and deletes the Python 2-only branches and skipped tests.

Fixes #1446 🦕
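
For context, a minimal sketch of the substitutions applied throughout the tests (the URL, bytes, and dict values below are made up for illustration; only the stdlib calls mirror the diff):

```python
import io
import urllib.parse

# six.moves.urllib.parse.urlparse / parse_qs  ->  urllib.parse.urlparse / parse_qs
parsed = urllib.parse.urlparse("https://example.com/zoo/v1/query?q=foo&i=1")
q = urllib.parse.parse_qs(parsed.query)
assert q["q"] == ["foo"]

# six.moves.urllib.parse.urlencode  ->  urllib.parse.urlencode
assert urllib.parse.urlencode({"trace": "html"}) == "trace=html"

# six.BytesIO / six.StringIO  ->  io.BytesIO (the tests only need the bytes variant)
fd = io.BytesIO(b"0123456789")
assert fd.read(3) == b"012"

# six.iteritems(d)  ->  d.items()
params = {"alt": "json", "fields": "description"}
for name, value in params.items():
    pass  # iterate the dict directly; no six helper needed

# six.PY3 guards and @unittest.skipIf(six.PY3, ...) blocks are removed outright,
# keeping only the Python 3 code path.
```

(The sketch imports `urllib.parse` explicitly so it runs standalone; the test modules import `urllib` and access the `parse` submodule as an attribute.)
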
diff --git a/tests/test__helpers.py b/tests/test__helpers.py
index 90c75ef..ab0bd4b 100644
--- a/tests/test__helpers.py
+++ b/tests/test__helpers.py
@@ -15,12 +15,9 @@
 """Unit tests for googleapiclient._helpers."""
 
 import unittest
+import urllib
 
 import mock
-
-import six
-from six.moves import urllib
-
 from googleapiclient import _helpers
 
 
diff --git a/tests/test_discovery.py b/tests/test_discovery.py
index 1202b2d..9500fbf 100644
--- a/tests/test_discovery.py
+++ b/tests/test_discovery.py
@@ -21,16 +21,14 @@
 Unit tests for objects created from discovery documents.
 """
 from __future__ import absolute_import
-import six
 
 __author__ = "jcgregorio@google.com (Joe Gregorio)"
 
-from six import BytesIO, StringIO
-from six.moves.urllib.parse import urlparse, parse_qs
-
+from collections import defaultdict
 import copy
 import datetime
 import httplib2
+import io
 import itertools
 import json
 import os
@@ -38,7 +36,7 @@
 import re
 import sys
 import unittest2 as unittest
-from collections import defaultdict
+import urllib
 
 from parameterized import parameterized
 import mock
@@ -98,15 +96,15 @@
 
 def assertUrisEqual(testcase, expected, actual):
     """Test that URIs are the same, up to reordering of query parameters."""
-    expected = urlparse(expected)
-    actual = urlparse(actual)
+    expected = urllib.parse.urlparse(expected)
+    actual = urllib.parse.urlparse(actual)
     testcase.assertEqual(expected.scheme, actual.scheme)
     testcase.assertEqual(expected.netloc, actual.netloc)
     testcase.assertEqual(expected.path, actual.path)
     testcase.assertEqual(expected.params, actual.params)
     testcase.assertEqual(expected.fragment, actual.fragment)
-    expected_query = parse_qs(expected.query)
-    actual_query = parse_qs(actual.query)
+    expected_query = urllib.parse.parse_qs(expected.query)
+    actual_query = urllib.parse.parse_qs(actual.query)
     for name in list(expected_query.keys()):
         testcase.assertEqual(expected_query[name], actual_query[name])
     for name in list(actual_query.keys()):
@@ -175,7 +173,7 @@
                 STACK_QUERY_PARAMETER_DEFAULT_VALUE, parameters[param_name]
             )
 
-        for param_name, value in six.iteritems(root_desc.get("parameters", {})):
+        for param_name, value in root_desc.get("parameters", {}).items():
             self.assertEqual(value, parameters[param_name])
 
         return parameters
@@ -1271,8 +1269,8 @@
             self.assertTrue("unexpected" in str(e))
 
     def _check_query_types(self, request):
-        parsed = urlparse(request.uri)
-        q = parse_qs(parsed[4])
+        parsed = urllib.parse.urlparse(request.uri)
+        q = urllib.parse.parse_qs(parsed.query)
         self.assertEqual(q["q"], ["foo"])
         self.assertEqual(q["i"], ["1"])
         self.assertEqual(q["n"], ["1.0"])
@@ -1319,8 +1317,8 @@
         zoo = build("zoo", "v1", http=http, static_discovery=False)
         request = zoo.query(trace="html", fields="description")
 
-        parsed = urlparse(request.uri)
-        q = parse_qs(parsed[4])
+        parsed = urllib.parse.urlparse(request.uri)
+        q = urllib.parse.parse_qs(parsed.query)
         self.assertEqual(q["trace"], ["html"])
         self.assertEqual(q["fields"], ["description"])
 
@@ -1329,8 +1327,8 @@
         zoo = build("zoo", "v1", http=http, static_discovery=False)
         request = zoo.query(trace=None, fields="description")
 
-        parsed = urlparse(request.uri)
-        q = parse_qs(parsed[4])
+        parsed = urllib.parse.urlparse(request.uri)
+        q = urllib.parse.parse_qs(parsed.query)
         self.assertFalse("trace" in q)
 
     def test_model_added_query_parameters(self):
@@ -1338,8 +1336,8 @@
         zoo = build("zoo", "v1", http=http, static_discovery=False)
         request = zoo.animals().get(name="Lion")
 
-        parsed = urlparse(request.uri)
-        q = parse_qs(parsed[4])
+        parsed = urllib.parse.urlparse(request.uri)
+        q = urllib.parse.parse_qs(parsed.query)
         self.assertEqual(q["alt"], ["json"])
         self.assertEqual(request.headers["accept"], "application/json")
 
@@ -1348,8 +1346,8 @@
         zoo = build("zoo", "v1", http=http, static_discovery=False)
         request = zoo.animals().getmedia(name="Lion")
 
-        parsed = urlparse(request.uri)
-        q = parse_qs(parsed[4])
+        parsed = urllib.parse.urlparse(request.uri)
+        q = urllib.parse.parse_qs(parsed.query)
         self.assertTrue("alt" not in q)
         self.assertEqual(request.headers["accept"], "*/*")
 
@@ -1426,8 +1424,8 @@
         self.assertTrue(getattr(zoo, "animals"))
 
         request = zoo.animals().list(name="bat", projection="full")
-        parsed = urlparse(request.uri)
-        q = parse_qs(parsed[4])
+        parsed = urllib.parse.urlparse(request.uri)
+        q = urllib.parse.parse_qs(parsed.query)
         self.assertEqual(q["name"], ["bat"])
         self.assertEqual(q["projection"], ["full"])
 
@@ -1436,26 +1434,17 @@
         zoo = build("zoo", "v1", http=self.http, static_discovery=False)
         self.assertTrue(getattr(zoo, "animals"))
         request = zoo.my().favorites().list(max_results="5")
-        parsed = urlparse(request.uri)
-        q = parse_qs(parsed[4])
+        parsed = urllib.parse.urlparse(request.uri)
+        q = urllib.parse.parse_qs(parsed.query)
         self.assertEqual(q["max-results"], ["5"])
 
-    @unittest.skipIf(six.PY3, "print is not a reserved name in Python 3")
-    def test_methods_with_reserved_names(self):
-        self.http = HttpMock(datafile("zoo.json"), {"status": "200"})
-        zoo = build("zoo", "v1", http=self.http)
-        self.assertTrue(getattr(zoo, "animals"))
-        request = zoo.global_().print_().assert_(max_results="5")
-        parsed = urlparse(request.uri)
-        self.assertEqual(parsed[2], "/zoo/v1/global/print/assert")
-
     def test_top_level_functions(self):
         self.http = HttpMock(datafile("zoo.json"), {"status": "200"})
         zoo = build("zoo", "v1", http=self.http, static_discovery=False)
         self.assertTrue(getattr(zoo, "query"))
         request = zoo.query(q="foo")
-        parsed = urlparse(request.uri)
-        q = parse_qs(parsed[4])
+        parsed = urllib.parse.urlparse(request.uri)
+        q = urllib.parse.parse_qs(parsed.query)
         self.assertEqual(q["q"], ["foo"])
 
     def test_simple_media_uploads(self):
@@ -1808,7 +1797,7 @@
         zoo = build("zoo", "v1", http=self.http, static_discovery=False)
 
         # Set up a seekable stream and try to upload in single chunk.
-        fd = BytesIO(b'01234"56789"')
+        fd = io.BytesIO(b'01234"56789"')
         media_upload = MediaIoBaseUpload(
             fd=fd, mimetype="text/plain", chunksize=-1, resumable=True
         )
@@ -1839,7 +1828,7 @@
         zoo = build("zoo", "v1", http=self.http, static_discovery=False)
 
         # Set up a seekable stream and try to upload in chunks.
-        fd = BytesIO(b"0123456789")
+        fd = io.BytesIO(b"0123456789")
         media_upload = MediaIoBaseUpload(
             fd=fd, mimetype="text/plain", chunksize=5, resumable=True
         )
@@ -1950,7 +1939,7 @@
         self.http = HttpMock(datafile("zoo.json"), {"status": "200"})
         zoo = build("zoo", "v1", http=self.http, static_discovery=False)
 
-        fd = BytesIO(b"data goes here")
+        fd = io.BytesIO(b"data goes here")
 
         # Create an upload that doesn't know the full size of the media.
         upload = MediaIoBaseUpload(
@@ -1975,7 +1964,7 @@
         zoo = build("zoo", "v1", http=self.http, static_discovery=False)
 
         # Create an upload that doesn't know the full size of the media.
-        fd = BytesIO(b"data goes here")
+        fd = io.BytesIO(b"data goes here")
 
         upload = MediaIoBaseUpload(
             fd=fd, mimetype="image/png", chunksize=500, resumable=True
@@ -2152,8 +2141,8 @@
         tasks = build("tasks", "v1", http=self.http)
         request = tasks.tasklists().list()
         next_request = tasks.tasklists().list_next(request, {"nextPageToken": "123abc"})
-        parsed = list(urlparse(next_request.uri))
-        q = parse_qs(parsed[4])
+        parsed = urllib.parse.urlparse(next_request.uri)
+        q = urllib.parse.parse_qs(parsed.query)
         self.assertEqual(q["pageToken"][0], "123abc")
 
     def test_next_successful_with_next_page_token_alternate_name(self):
@@ -2161,8 +2150,8 @@
         bigquery = build("bigquery", "v2", http=self.http)
         request = bigquery.tabledata().list(datasetId="", projectId="", tableId="")
         next_request = bigquery.tabledata().list_next(request, {"pageToken": "123abc"})
-        parsed = list(urlparse(next_request.uri))
-        q = parse_qs(parsed[4])
+        parsed = urllib.parse.urlparse(next_request.uri)
+        q = urllib.parse.parse_qs(parsed.query)
         self.assertEqual(q["pageToken"][0], "123abc")
 
     def test_next_successful_with_next_page_token_in_body(self):
@@ -2192,8 +2181,8 @@
         drive = build("drive", "v3", http=self.http)
         request = drive.changes().list(pageToken="startPageToken")
         next_request = drive.changes().list_next(request, {"nextPageToken": "123abc"})
-        parsed = list(urlparse(next_request.uri))
-        q = parse_qs(parsed[4])
+        parsed = urllib.parse.urlparse(next_request.uri)
+        q = urllib.parse.parse_qs(parsed.query)
         self.assertEqual(q["pageToken"][0], "123abc")
 
 
@@ -2203,8 +2192,8 @@
         zoo = build("zoo", "v1", http=http, static_discovery=False)
         request = zoo.animals().get_media(name="Lion")
 
-        parsed = urlparse(request.uri)
-        q = parse_qs(parsed[4])
+        parsed = urllib.parse.urlparse(request.uri)
+        q = urllib.parse.parse_qs(parsed.query)
         self.assertEqual(q["alt"], ["media"])
         self.assertEqual(request.headers["accept"], "*/*")
 
diff --git a/tests/test_http.py b/tests/test_http.py
index bfd9ba8..5484269 100644
--- a/tests/test_http.py
+++ b/tests/test_http.py
@@ -19,14 +19,10 @@
 Unit tests for the googleapiclient.http.
 """
 from __future__ import absolute_import
-from six.moves import range
 
 __author__ = "jcgregorio@google.com (Joe Gregorio)"
 
-from six import PY3
-from six import BytesIO, StringIO
 from io import FileIO
-from six.moves.urllib.parse import urlencode
 
 # Do not remove the httplib2 import
 import json
@@ -36,6 +32,7 @@
 import mock
 import os
 import unittest2 as unittest
+import urllib
 import random
 import socket
 import ssl
@@ -132,7 +129,7 @@
     def request(self, *args, **kwargs):
         if not self.num_errors:
             return httplib2.Response(self.success_json), self.success_data
-        elif self.num_errors == 5 and PY3:
+        elif self.num_errors == 5:
             ex = ConnectionResetError  # noqa: F821
         elif self.num_errors == 4:
             ex = httplib2.ServerNotFoundError()
@@ -149,11 +146,7 @@
                 ex.errno = socket.errno.WSAETIMEDOUT
             except AttributeError:
                 # For Linux/Mac:
-                if PY3:
-                    ex = socket.timeout()
-                else:
-                    ex = OSError()
-                    ex.errno = socket.errno.ETIMEDOUT
+                ex = socket.timeout()
 
         self.num_errors -= 1
         raise ex
@@ -214,12 +207,8 @@
     def test_media_file_upload_closes_fd_in___del__(self):
         file_desc = mock.Mock(spec=io.TextIOWrapper)
         opener = mock.mock_open(file_desc)
-        if PY3:
-            with mock.patch("builtins.open", return_value=opener):
-                upload = MediaFileUpload(datafile("test_close"), mimetype="text/plain")
-        else:
-            with mock.patch("__builtin__.open", return_value=opener):
-                upload = MediaFileUpload(datafile("test_close"), mimetype="text/plain")
+        with mock.patch("builtins.open", return_value=opener):
+            upload = MediaFileUpload(datafile("test_close"), mimetype="text/plain")
         self.assertIs(upload.stream(), file_desc)
         del upload
         file_desc.close.assert_called_once_with()
@@ -338,25 +327,10 @@
         except NotImplementedError:
             pass
 
-    @unittest.skipIf(PY3, "Strings and Bytes are different types")
-    def test_media_io_base_upload_from_string_io(self):
-        f = open(datafile("small.png"), "rb")
-        fd = StringIO(f.read())
-        f.close()
-
-        upload = MediaIoBaseUpload(
-            fd=fd, mimetype="image/png", chunksize=500, resumable=True
-        )
-        self.assertEqual("image/png", upload.mimetype())
-        self.assertEqual(190, upload.size())
-        self.assertEqual(True, upload.resumable())
-        self.assertEqual(500, upload.chunksize())
-        self.assertEqual(b"PNG", upload.getbytes(1, 3))
-        f.close()
 
     def test_media_io_base_upload_from_bytes(self):
         f = open(datafile("small.png"), "rb")
-        fd = BytesIO(f.read())
+        fd = io.BytesIO(f.read())
         upload = MediaIoBaseUpload(
             fd=fd, mimetype="image/png", chunksize=500, resumable=True
         )
@@ -368,7 +342,7 @@
 
     def test_media_io_base_upload_raises_on_invalid_chunksize(self):
         f = open(datafile("small.png"), "rb")
-        fd = BytesIO(f.read())
+        fd = io.BytesIO(f.read())
         self.assertRaises(
             InvalidChunkSizeError,
             MediaIoBaseUpload,
@@ -379,7 +353,7 @@
         )
 
     def test_media_io_base_upload_streamable(self):
-        fd = BytesIO(b"stuff")
+        fd = io.BytesIO(b"stuff")
         upload = MediaIoBaseUpload(
             fd=fd, mimetype="image/png", chunksize=500, resumable=True
         )
@@ -388,7 +362,7 @@
 
     def test_media_io_base_next_chunk_retries(self):
         f = open(datafile("small.png"), "rb")
-        fd = BytesIO(f.read())
+        fd = io.BytesIO(f.read())
         upload = MediaIoBaseUpload(
             fd=fd, mimetype="image/png", chunksize=500, resumable=True
         )
@@ -423,7 +397,7 @@
         self.assertEqual([20, 40, 80, 20, 40, 80], sleeptimes)
 
     def test_media_io_base_next_chunk_no_retry_403_not_configured(self):
-        fd = BytesIO(b"i am png")
+        fd = io.BytesIO(b"i am png")
         upload = MediaIoBaseUpload(
             fd=fd, mimetype="image/png", chunksize=500, resumable=True
         )
@@ -448,7 +422,7 @@
 
 
     def test_media_io_base_empty_file(self):
-        fd = BytesIO()
+        fd = io.BytesIO()
         upload = MediaIoBaseUpload(
             fd=fd, mimetype="image/png", chunksize=500, resumable=True
         )
@@ -479,7 +453,7 @@
         http = HttpMock(datafile("zoo.json"), {"status": "200"})
         zoo = build("zoo", "v1", http=http, static_discovery=False)
         self.request = zoo.animals().get_media(name="Lion")
-        self.fd = BytesIO()
+        self.fd = io.BytesIO()
 
     def test_media_io_base_download(self):
         self.request.http = HttpMockSequence(
@@ -544,7 +518,7 @@
 
         self.assertEqual(result.get("Cache-Control"), "no-store")
 
-        download._fd = self.fd = BytesIO()
+        download._fd = self.fd = io.BytesIO()
         status, done = download.next_chunk()
 
         result = json.loads(self.fd.getvalue().decode("utf-8"))
@@ -974,7 +948,7 @@
 
     def test_retry_connection_errors_resumable(self):
         with open(datafile("small.png"), "rb") as small_png_file:
-            small_png_fd = BytesIO(small_png_file.read())
+            small_png_fd = io.BytesIO(small_png_file.read())
         upload = MediaIoBaseUpload(
             fd=small_png_fd, mimetype="image/png", chunksize=500, resumable=True
         )
@@ -1609,7 +1583,7 @@
         req = HttpRequest(
             http,
             _postproc,
-            "http://example.com?" + urlencode(query),
+            "http://example.com?" + urllib.parse.urlencode(query),
             method="GET",
             body=None,
             headers={},
@@ -1632,7 +1606,7 @@
     """Test _StreamSlice."""
 
     def setUp(self):
-        self.stream = BytesIO(b"0123456789")
+        self.stream = io.BytesIO(b"0123456789")
 
     def test_read(self):
         s = _StreamSlice(self.stream, 0, 4)
diff --git a/tests/test_json_model.py b/tests/test_json_model.py
index 6857803..416b7be 100644
--- a/tests/test_json_model.py
+++ b/tests/test_json_model.py
@@ -19,24 +19,22 @@
 Unit tests for the JSON model.
 """
 from __future__ import absolute_import
-import six
 
 __author__ = "jcgregorio@google.com (Joe Gregorio)"
 
-import copy
+import httplib2
 import json
-import os
 import pkg_resources
 import platform
 import unittest2 as unittest
-import httplib2
+import urllib
+
 import googleapiclient.model
 
+
 from googleapiclient.errors import HttpError
 from googleapiclient.model import JsonModel
 
-from six.moves.urllib.parse import parse_qs
-
 _LIBRARY_VERSION = pkg_resources.get_distribution("google-api-python-client").version
 
 
@@ -130,14 +128,9 @@
         self.assertEqual(headers["accept"], "application/json")
         self.assertEqual(headers["content-type"], "application/json")
 
-        query_dict = parse_qs(query[1:])
+        query_dict = urllib.parse.parse_qs(query[1:])
         self.assertEqual(query_dict["foo"], ["1"])
-        if six.PY3:
-            # Python 3, no need to encode
-            self.assertEqual(query_dict["bar"], [u"\N{COMET}"])
-        else:
-            # Python 2, encode string
-            self.assertEqual(query_dict["bar"], [u"\N{COMET}".encode("utf-8")])
+        self.assertEqual(query_dict["bar"], [u"\N{COMET}"])
         self.assertEqual(query_dict["baz"], ["fe", "fi", "fo", "fum"])
         self.assertTrue("qux" not in query_dict)
         self.assertEqual(body, "{}")
@@ -250,7 +243,7 @@
             def __init__(self, items):
                 super(MockResponse, self).__init__()
                 self.status = items["status"]
-                for key, value in six.iteritems(items):
+                for key, value in items.items():
                     self[key] = value
 
         old_logging = googleapiclient.model.LOGGER
diff --git a/tests/test_protobuf_model.py b/tests/test_protobuf_model.py
index 78caf4e..4479b13 100644
--- a/tests/test_protobuf_model.py
+++ b/tests/test_protobuf_model.py
@@ -24,13 +24,9 @@
 
 import unittest2 as unittest
 import httplib2
-import googleapiclient.model
 
-from googleapiclient.errors import HttpError
 from googleapiclient.model import ProtocolBufferModel
 
-from six.moves.urllib.parse import parse_qs
-
 
 class MockProtocolBuffer(object):
     def __init__(self, data=None):