blacken api_core and core (#6668)

* blacken api_core and core
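
For context: Black was run over the api_core and core source trees, and flake8 is
configured to ignore the checks that Black's output intentionally trips (E203
whitespace before ':', E266, E501 line length, W503 line break before a binary
operator). A minimal sketch of a nox session that would apply and verify this
formatting follows; the session names and target paths here are illustrative
assumptions, not taken from this change.

    import nox

    # Directories to format; adjust to the actual package layout (assumed here).
    BLACK_PATHS = ["google", "tests", "docs"]

    @nox.session(python="3.6")
    def blacken(session):
        """Reformat the code in place with black."""
        session.install("black")
        session.run("black", *BLACK_PATHS)

    @nox.session(python="3.6")
    def lint(session):
        """Check formatting with black --check, then run flake8 with the ignores above."""
        session.install("black", "flake8", "flake8-import-order")
        session.run("black", "--check", *BLACK_PATHS)
        session.run("flake8", "google", "tests")
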
diff --git a/.flake8 b/.flake8
index 3db9b73..3da787c 100644
--- a/.flake8
+++ b/.flake8
@@ -3,6 +3,7 @@
 # Note: this forces all google imports to be in the third group. See
 # https://github.com/PyCQA/flake8-import-order/issues/111
 application-import-names=google
+ignore = E203, E266, E501, W503
 exclude =
   __pycache__,
   .git,
diff --git a/google/__init__.py b/google/__init__.py
index e338417..0d0a4c3 100644
--- a/google/__init__.py
+++ b/google/__init__.py
@@ -16,7 +16,9 @@
 
 try:
     import pkg_resources
+
     pkg_resources.declare_namespace(__name__)
 except ImportError:
     import pkgutil
+
     __path__ = pkgutil.extend_path(__path__, __name__)
diff --git a/google/api_core/__init__.py b/google/api_core/__init__.py
index 35d1238..c762e18 100644
--- a/google/api_core/__init__.py
+++ b/google/api_core/__init__.py
@@ -20,4 +20,4 @@
 from pkg_resources import get_distribution
 
 
-__version__ = get_distribution('google-api-core').version
+__version__ = get_distribution("google-api-core").version
diff --git a/google/api_core/bidi.py b/google/api_core/bidi.py
index 4d77ecc..795a8d2 100644
--- a/google/api_core/bidi.py
+++ b/google/api_core/bidi.py
@@ -22,7 +22,7 @@
 from google.api_core import exceptions
 
 _LOGGER = logging.getLogger(__name__)
-_BIDIRECTIONAL_CONSUMER_NAME = 'Thread-ConsumeBidirectionalStream'
+_BIDIRECTIONAL_CONSUMER_NAME = "Thread-ConsumeBidirectionalStream"
 
 
 class _RequestQueueGenerator(object):
@@ -79,6 +79,7 @@
             easily restarting streams that require some initial configuration
             request.
     """
+
     def __init__(self, queue, period=1, initial_request=None):
         self._queue = queue
         self._period = period
@@ -107,8 +108,8 @@
             except queue.Empty:
                 if not self._is_active():
                     _LOGGER.debug(
-                        'Empty queue and inactive call, exiting request '
-                        'generator.')
+                        "Empty queue and inactive call, exiting request " "generator."
+                    )
                     return
                 else:
                     # call is still active, keep waiting for queue items.
@@ -117,7 +118,7 @@
             # The consumer explicitly sent "None", indicating that the request
             # should end.
             if item is None:
-                _LOGGER.debug('Cleanly exiting request generator.')
+                _LOGGER.debug("Cleanly exiting request generator.")
                 return
 
             if not self._is_active():
@@ -125,8 +126,9 @@
                 # item back on the queue so that the next call can consume it.
                 self._queue.put(item)
                 _LOGGER.debug(
-                    'Inactive call, replacing item on queue and exiting '
-                    'request generator.')
+                    "Inactive call, replacing item on queue and exiting "
+                    "request generator."
+                )
                 return
 
             yield item
@@ -164,6 +166,7 @@
             yield. This is useful if an initial request is needed to start the
             stream.
     """
+
     def __init__(self, start_rpc, initial_request=None):
         self._start_rpc = start_rpc
         self._initial_request = initial_request
@@ -192,17 +195,18 @@
     def open(self):
         """Opens the stream."""
         if self.is_active:
-            raise ValueError('Can not open an already open stream.')
+            raise ValueError("Can not open an already open stream.")
 
         request_generator = _RequestQueueGenerator(
-            self._request_queue, initial_request=self._initial_request)
+            self._request_queue, initial_request=self._initial_request
+        )
         call = self._start_rpc(iter(request_generator))
 
         request_generator.call = call
 
         # TODO: api_core should expose the future interface for wrapped
         # callables as well.
-        if hasattr(call, '_wrapped'):  # pragma: NO COVER
+        if hasattr(call, "_wrapped"):  # pragma: NO COVER
             call._wrapped.add_done_callback(self._on_call_done)
         else:
             call.add_done_callback(self._on_call_done)
@@ -232,8 +236,7 @@
             request (protobuf.Message): The request to send.
         """
         if self.call is None:
-            raise ValueError(
-                'Can not send() on an RPC that has never been open()ed.')
+            raise ValueError("Can not send() on an RPC that has never been open()ed.")
 
         # Don't use self.is_active(), as ResumableBidiRpc will overload it
         # to mean something semantically different.
@@ -254,8 +257,7 @@
             protobuf.Message: The received message.
         """
         if self.call is None:
-            raise ValueError(
-                'Can not recv() on an RPC that has never been open()ed.')
+            raise ValueError("Can not recv() on an RPC that has never been open()ed.")
 
         return next(self.call)
 
@@ -309,6 +311,7 @@
             True if the stream should be recovered. This will be called
             whenever an error is encountered on the stream.
     """
+
     def __init__(self, start_rpc, should_recover, initial_request=None):
         super(ResumableBidiRpc, self).__init__(start_rpc, initial_request)
         self._should_recover = should_recover
@@ -334,14 +337,14 @@
             if not self._should_recover(future):
                 self._finalize(future)
             else:
-                _LOGGER.debug('Re-opening stream from gRPC callback.')
+                _LOGGER.debug("Re-opening stream from gRPC callback.")
                 self._reopen()
 
     def _reopen(self):
         with self._operational_lock:
             # Another thread already managed to re-open this stream.
             if self.call is not None and self.call.is_active():
-                _LOGGER.debug('Stream was already re-established.')
+                _LOGGER.debug("Stream was already re-established.")
                 return
 
             self.call = None
@@ -362,11 +365,11 @@
             # If re-opening or re-calling the method fails for any reason,
             # consider it a terminal error and finalize the stream.
             except Exception as exc:
-                _LOGGER.debug('Failed to re-open stream due to %s', exc)
+                _LOGGER.debug("Failed to re-open stream due to %s", exc)
                 self._finalize(exc)
                 raise
 
-            _LOGGER.info('Re-established stream')
+            _LOGGER.info("Re-established stream")
 
     def _recoverable(self, method, *args, **kwargs):
         """Wraps a method to recover the stream and retry on error.
@@ -388,18 +391,15 @@
 
             except Exception as exc:
                 with self._operational_lock:
-                    _LOGGER.debug(
-                        'Call to retryable %r caused %s.', method, exc)
+                    _LOGGER.debug("Call to retryable %r caused %s.", method, exc)
 
                     if not self._should_recover(exc):
                         self.close()
-                        _LOGGER.debug(
-                            'Not retrying %r due to %s.', method, exc)
+                        _LOGGER.debug("Not retrying %r due to %s.", method, exc)
                         self._finalize(exc)
                         raise exc
 
-                    _LOGGER.debug(
-                        'Re-opening stream from retryable %r.', method)
+                    _LOGGER.debug("Re-opening stream from retryable %r.", method)
                     self._reopen()
 
     def _send(self, request):
@@ -414,8 +414,7 @@
             call = self.call
 
         if call is None:
-            raise ValueError(
-                'Can not send() on an RPC that has never been open()ed.')
+            raise ValueError("Can not send() on an RPC that has never been open()ed.")
 
         # Don't use self.is_active(), as ResumableBidiRpc will overload it
         # to mean something semantically different.
@@ -434,8 +433,7 @@
             call = self.call
 
         if call is None:
-            raise ValueError(
-                'Can not recv() on an RPC that has never been open()ed.')
+            raise ValueError("Can not recv() on an RPC that has never been open()ed.")
 
         return next(call)
 
@@ -493,6 +491,7 @@
         on_response (Callable[[protobuf.Message], None]): The callback to
             be called for every response on the stream.
     """
+
     def __init__(self, bidi_rpc, on_response):
         self._bidi_rpc = bidi_rpc
         self._on_response = on_response
@@ -522,43 +521,47 @@
                 # Python 2.7.
                 with self._wake:
                     if self._paused:
-                        _LOGGER.debug('paused, waiting for waking.')
+                        _LOGGER.debug("paused, waiting for waking.")
                         self._wake.wait()
-                        _LOGGER.debug('woken.')
+                        _LOGGER.debug("woken.")
 
-                _LOGGER.debug('waiting for recv.')
+                _LOGGER.debug("waiting for recv.")
                 response = self._bidi_rpc.recv()
-                _LOGGER.debug('recved response.')
+                _LOGGER.debug("recved response.")
                 self._on_response(response)
 
         except exceptions.GoogleAPICallError as exc:
             _LOGGER.debug(
-                '%s caught error %s and will exit. Generally this is due to '
-                'the RPC itself being cancelled and the error will be '
-                'surfaced to the calling code.',
-                _BIDIRECTIONAL_CONSUMER_NAME, exc, exc_info=True)
+                "%s caught error %s and will exit. Generally this is due to "
+                "the RPC itself being cancelled and the error will be "
+                "surfaced to the calling code.",
+                _BIDIRECTIONAL_CONSUMER_NAME,
+                exc,
+                exc_info=True,
+            )
 
         except Exception as exc:
             _LOGGER.exception(
-                '%s caught unexpected exception %s and will exit.',
-                _BIDIRECTIONAL_CONSUMER_NAME, exc)
+                "%s caught unexpected exception %s and will exit.",
+                _BIDIRECTIONAL_CONSUMER_NAME,
+                exc,
+            )
 
         else:
-            _LOGGER.error(
-                'The bidirectional RPC exited.')
+            _LOGGER.error("The bidirectional RPC exited.")
 
-        _LOGGER.info('%s exiting', _BIDIRECTIONAL_CONSUMER_NAME)
+        _LOGGER.info("%s exiting", _BIDIRECTIONAL_CONSUMER_NAME)
 
     def start(self):
         """Start the background thread and begin consuming the thread."""
         with self._operational_lock:
             thread = threading.Thread(
-                name=_BIDIRECTIONAL_CONSUMER_NAME,
-                target=self._thread_main)
+                name=_BIDIRECTIONAL_CONSUMER_NAME, target=self._thread_main
+            )
             thread.daemon = True
             thread.start()
             self._thread = thread
-            _LOGGER.debug('Started helper thread %s', thread.name)
+            _LOGGER.debug("Started helper thread %s", thread.name)
 
     def stop(self):
         """Stop consuming the stream and shutdown the background thread."""
diff --git a/google/api_core/datetime_helpers.py b/google/api_core/datetime_helpers.py
index 393d2d6..3f3523b 100644
--- a/google/api_core/datetime_helpers.py
+++ b/google/api_core/datetime_helpers.py
@@ -22,10 +22,11 @@
 
 
 _UTC_EPOCH = datetime.datetime.utcfromtimestamp(0).replace(tzinfo=pytz.utc)
-_RFC3339_MICROS = '%Y-%m-%dT%H:%M:%S.%fZ'
-_RFC3339_NO_FRACTION = '%Y-%m-%dT%H:%M:%S'
+_RFC3339_MICROS = "%Y-%m-%dT%H:%M:%S.%fZ"
+_RFC3339_NO_FRACTION = "%Y-%m-%dT%H:%M:%S"
 # datetime.strptime cannot handle nanosecond precision:  parse w/ regex
-_RFC3339_NANOS = re.compile(r"""
+_RFC3339_NANOS = re.compile(
+    r"""
     (?P<no_fraction>
         \d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}  # YYYY-MM-DDTHH:MM:SS
     )
@@ -34,7 +35,9 @@
      (?P<nanos>\d{1,9})                      # nanoseconds, maybe truncated
     )?
     Z                                        # Zulu
-""", re.VERBOSE)
+""",
+    re.VERBOSE,
+)
 
 
 def utcnow():
@@ -94,7 +97,7 @@
     Returns:
         datetime.date: A date equivalent to the date string.
     """
-    return datetime.datetime.strptime(value, '%Y-%m-%d').date()
+    return datetime.datetime.strptime(value, "%Y-%m-%d").date()
 
 
 def from_iso8601_time(value):
@@ -106,7 +109,7 @@
     Returns:
         datetime.time: A time equivalent to the time string.
     """
-    return datetime.datetime.strptime(value, '%H:%M:%S').time()
+    return datetime.datetime.strptime(value, "%H:%M:%S").time()
 
 
 def from_rfc3339(value):
@@ -119,8 +122,7 @@
         datetime.datetime: The datetime object equivalent to the timestamp in
             UTC.
     """
-    return datetime.datetime.strptime(
-        value, _RFC3339_MICROS).replace(tzinfo=pytz.utc)
+    return datetime.datetime.strptime(value, _RFC3339_MICROS).replace(tzinfo=pytz.utc)
 
 
 def from_rfc3339_nanos(value):
@@ -145,12 +147,15 @@
 
     if with_nanos is None:
         raise ValueError(
-            'Timestamp: {!r}, does not match pattern: {!r}'.format(
-                value, _RFC3339_NANOS.pattern))
+            "Timestamp: {!r}, does not match pattern: {!r}".format(
+                value, _RFC3339_NANOS.pattern
+            )
+        )
 
     bare_seconds = datetime.datetime.strptime(
-        with_nanos.group('no_fraction'), _RFC3339_NO_FRACTION)
-    fraction = with_nanos.group('nanos')
+        with_nanos.group("no_fraction"), _RFC3339_NO_FRACTION
+    )
+    fraction = with_nanos.group("nanos")
 
     if fraction is None:
         micros = 0
@@ -186,19 +191,20 @@
 
     Nanosecond can be passed only as a keyword argument.
     """
-    __slots__ = ('_nanosecond',)
+
+    __slots__ = ("_nanosecond",)
 
     # pylint: disable=arguments-differ
     def __new__(cls, *args, **kw):
-        nanos = kw.pop('nanosecond', 0)
+        nanos = kw.pop("nanosecond", 0)
         if nanos > 0:
-            if 'microsecond' in kw:
-                raise TypeError(
-                    "Specify only one of 'microsecond' or 'nanosecond'")
-            kw['microsecond'] = nanos // 1000
+            if "microsecond" in kw:
+                raise TypeError("Specify only one of 'microsecond' or 'nanosecond'")
+            kw["microsecond"] = nanos // 1000
         inst = datetime.datetime.__new__(cls, *args, **kw)
         inst._nanosecond = nanos or 0
         return inst
+
     # pylint: disable=arguments-differ
 
     @property
@@ -214,8 +220,8 @@
         """
         if self._nanosecond == 0:
             return to_rfc3339(self)
-        nanos = str(self._nanosecond).rstrip('0')
-        return '{}.{}Z'.format(self.strftime(_RFC3339_NO_FRACTION), nanos)
+        nanos = str(self._nanosecond).rstrip("0")
+        return "{}.{}Z".format(self.strftime(_RFC3339_NO_FRACTION), nanos)
 
     @classmethod
     def from_rfc3339(cls, stamp):
@@ -234,16 +240,26 @@
         with_nanos = _RFC3339_NANOS.match(stamp)
         if with_nanos is None:
             raise ValueError(
-                'Timestamp: {}, does not match pattern: {}'.format(
-                    stamp, _RFC3339_NANOS.pattern))
+                "Timestamp: {}, does not match pattern: {}".format(
+                    stamp, _RFC3339_NANOS.pattern
+                )
+            )
         bare = datetime.datetime.strptime(
-            with_nanos.group('no_fraction'), _RFC3339_NO_FRACTION)
-        fraction = with_nanos.group('nanos')
+            with_nanos.group("no_fraction"), _RFC3339_NO_FRACTION
+        )
+        fraction = with_nanos.group("nanos")
         if fraction is None:
             nanos = 0
         else:
             scale = 9 - len(fraction)
             nanos = int(fraction) * (10 ** scale)
-        return cls(bare.year, bare.month, bare.day,
-                   bare.hour, bare.minute, bare.second,
-                   nanosecond=nanos, tzinfo=pytz.UTC)
+        return cls(
+            bare.year,
+            bare.month,
+            bare.day,
+            bare.hour,
+            bare.minute,
+            bare.second,
+            nanosecond=nanos,
+            tzinfo=pytz.UTC,
+        )
diff --git a/google/api_core/exceptions.py b/google/api_core/exceptions.py
index 5cd5ea9..eed4ee4 100644
--- a/google/api_core/exceptions.py
+++ b/google/api_core/exceptions.py
@@ -37,6 +37,7 @@
 
 class GoogleAPIError(Exception):
     """Base class for all exceptions raised by Google API Clients."""
+
     pass
 
 
@@ -49,6 +50,7 @@
         cause (Exception): The last exception raised when retrying the
             function.
     """
+
     def __init__(self, message, cause):
         super(RetryError, self).__init__(message)
         self.message = message
@@ -60,11 +62,12 @@
         return self._cause
 
     def __str__(self):
-        return '{}, last exception: {}'.format(self.message, self.cause)
+        return "{}, last exception: {}".format(self.message, self.cause)
 
 
 class _GoogleAPICallErrorMeta(type):
     """Metaclass for registering GoogleAPICallError subclasses."""
+
     def __new__(mcs, name, bases, class_dict):
         cls = type.__new__(mcs, name, bases, class_dict)
         if cls.code is not None:
@@ -110,7 +113,7 @@
         self._response = response
 
     def __str__(self):
-        return '{} {}'.format(self.code, self.message)
+        return "{} {}".format(self.code, self.message)
 
     @property
     def errors(self):
@@ -134,16 +137,19 @@
 
 class MovedPermanently(Redirection):
     """Exception mapping a ``301 Moved Permanently`` response."""
+
     code = http_client.MOVED_PERMANENTLY
 
 
 class NotModified(Redirection):
     """Exception mapping a ``304 Not Modified`` response."""
+
     code = http_client.NOT_MODIFIED
 
 
 class TemporaryRedirect(Redirection):
     """Exception mapping a ``307 Temporary Redirect`` response."""
+
     code = http_client.TEMPORARY_REDIRECT
 
 
@@ -153,6 +159,7 @@
     .. note:: :attr:`http_client.PERMANENT_REDIRECT` is ``308``, but Google
         APIs differ in their use of this status code.
     """
+
     code = 308
 
 
@@ -162,109 +169,119 @@
 
 class BadRequest(ClientError):
     """Exception mapping a ``400 Bad Request`` response."""
+
     code = http_client.BAD_REQUEST
 
 
 class InvalidArgument(BadRequest):
     """Exception mapping a :attr:`grpc.StatusCode.INVALID_ARGUMENT` error."""
-    grpc_status_code = (
-        grpc.StatusCode.INVALID_ARGUMENT if grpc is not None else None)
+
+    grpc_status_code = grpc.StatusCode.INVALID_ARGUMENT if grpc is not None else None
 
 
 class FailedPrecondition(BadRequest):
     """Exception mapping a :attr:`grpc.StatusCode.FAILED_PRECONDITION`
     error."""
-    grpc_status_code = (
-        grpc.StatusCode.FAILED_PRECONDITION if grpc is not None else None)
+
+    grpc_status_code = grpc.StatusCode.FAILED_PRECONDITION if grpc is not None else None
 
 
 class OutOfRange(BadRequest):
     """Exception mapping a :attr:`grpc.StatusCode.OUT_OF_RANGE` error."""
-    grpc_status_code = (
-        grpc.StatusCode.OUT_OF_RANGE if grpc is not None else None)
+
+    grpc_status_code = grpc.StatusCode.OUT_OF_RANGE if grpc is not None else None
 
 
 class Unauthorized(ClientError):
     """Exception mapping a ``401 Unauthorized`` response."""
+
     code = http_client.UNAUTHORIZED
 
 
 class Unauthenticated(Unauthorized):
     """Exception mapping a :attr:`grpc.StatusCode.UNAUTHENTICATED` error."""
-    grpc_status_code = (
-        grpc.StatusCode.UNAUTHENTICATED if grpc is not None else None)
+
+    grpc_status_code = grpc.StatusCode.UNAUTHENTICATED if grpc is not None else None
 
 
 class Forbidden(ClientError):
     """Exception mapping a ``403 Forbidden`` response."""
+
     code = http_client.FORBIDDEN
 
 
 class PermissionDenied(Forbidden):
     """Exception mapping a :attr:`grpc.StatusCode.PERMISSION_DENIED` error."""
-    grpc_status_code = (
-        grpc.StatusCode.PERMISSION_DENIED if grpc is not None else None)
+
+    grpc_status_code = grpc.StatusCode.PERMISSION_DENIED if grpc is not None else None
 
 
 class NotFound(ClientError):
     """Exception mapping a ``404 Not Found`` response or a
     :attr:`grpc.StatusCode.NOT_FOUND` error."""
+
     code = http_client.NOT_FOUND
-    grpc_status_code = (
-        grpc.StatusCode.NOT_FOUND if grpc is not None else None)
+    grpc_status_code = grpc.StatusCode.NOT_FOUND if grpc is not None else None
 
 
 class MethodNotAllowed(ClientError):
     """Exception mapping a ``405 Method Not Allowed`` response."""
+
     code = http_client.METHOD_NOT_ALLOWED
 
 
 class Conflict(ClientError):
     """Exception mapping a ``409 Conflict`` response."""
+
     code = http_client.CONFLICT
 
 
 class AlreadyExists(Conflict):
     """Exception mapping a :attr:`grpc.StatusCode.ALREADY_EXISTS` error."""
-    grpc_status_code = (
-        grpc.StatusCode.ALREADY_EXISTS if grpc is not None else None)
+
+    grpc_status_code = grpc.StatusCode.ALREADY_EXISTS if grpc is not None else None
 
 
 class Aborted(Conflict):
     """Exception mapping a :attr:`grpc.StatusCode.ABORTED` error."""
-    grpc_status_code = (
-        grpc.StatusCode.ABORTED if grpc is not None else None)
+
+    grpc_status_code = grpc.StatusCode.ABORTED if grpc is not None else None
 
 
 class LengthRequired(ClientError):
     """Exception mapping a ``411 Length Required`` response."""
+
     code = http_client.LENGTH_REQUIRED
 
 
 class PreconditionFailed(ClientError):
     """Exception mapping a ``412 Precondition Failed`` response."""
+
     code = http_client.PRECONDITION_FAILED
 
 
 class RequestRangeNotSatisfiable(ClientError):
     """Exception mapping a ``416 Request Range Not Satisfiable`` response."""
+
     code = http_client.REQUESTED_RANGE_NOT_SATISFIABLE
 
 
 class TooManyRequests(ClientError):
     """Exception mapping a ``429 Too Many Requests`` response."""
+
     # http_client does not define a constant for this in Python 2.
     code = 429
 
 
 class ResourceExhausted(TooManyRequests):
     """Exception mapping a :attr:`grpc.StatusCode.RESOURCE_EXHAUSTED` error."""
-    grpc_status_code = (
-        grpc.StatusCode.RESOURCE_EXHAUSTED if grpc is not None else None)
+
+    grpc_status_code = grpc.StatusCode.RESOURCE_EXHAUSTED if grpc is not None else None
 
 
 class Cancelled(ClientError):
     """Exception mapping a :attr:`grpc.StatusCode.CANCELLED` error."""
+
     # This maps to HTTP status code 499. See
     # https://github.com/googleapis/googleapis/blob/master/google/rpc\
     # /code.proto
@@ -279,50 +296,55 @@
 class InternalServerError(ServerError):
     """Exception mapping a ``500 Internal Server Error`` response. or a
     :attr:`grpc.StatusCode.INTERNAL` error."""
+
     code = http_client.INTERNAL_SERVER_ERROR
     grpc_status_code = grpc.StatusCode.INTERNAL if grpc is not None else None
 
 
 class Unknown(ServerError):
     """Exception mapping a :attr:`grpc.StatusCode.UNKNOWN` error."""
+
     grpc_status_code = grpc.StatusCode.UNKNOWN if grpc is not None else None
 
 
 class DataLoss(ServerError):
     """Exception mapping a :attr:`grpc.StatusCode.DATA_LOSS` error."""
+
     grpc_status_code = grpc.StatusCode.DATA_LOSS if grpc is not None else None
 
 
 class MethodNotImplemented(ServerError):
     """Exception mapping a ``501 Not Implemented`` response or a
     :attr:`grpc.StatusCode.UNIMPLEMENTED` error."""
+
     code = http_client.NOT_IMPLEMENTED
-    grpc_status_code = (
-        grpc.StatusCode.UNIMPLEMENTED if grpc is not None else None)
+    grpc_status_code = grpc.StatusCode.UNIMPLEMENTED if grpc is not None else None
 
 
 class BadGateway(ServerError):
     """Exception mapping a ``502 Bad Gateway`` response."""
+
     code = http_client.BAD_GATEWAY
 
 
 class ServiceUnavailable(ServerError):
     """Exception mapping a ``503 Service Unavailable`` response or a
     :attr:`grpc.StatusCode.UNAVAILABLE` error."""
+
     code = http_client.SERVICE_UNAVAILABLE
-    grpc_status_code = (
-        grpc.StatusCode.UNAVAILABLE if grpc is not None else None)
+    grpc_status_code = grpc.StatusCode.UNAVAILABLE if grpc is not None else None
 
 
 class GatewayTimeout(ServerError):
     """Exception mapping a ``504 Gateway Timeout`` response."""
+
     code = http_client.GATEWAY_TIMEOUT
 
 
 class DeadlineExceeded(GatewayTimeout):
     """Exception mapping a :attr:`grpc.StatusCode.DEADLINE_EXCEEDED` error."""
-    grpc_status_code = (
-        grpc.StatusCode.DEADLINE_EXCEEDED if grpc is not None else None)
+
+    grpc_status_code = grpc.StatusCode.DEADLINE_EXCEEDED if grpc is not None else None
 
 
 def exception_class_for_http_status(status_code):
@@ -373,18 +395,18 @@
     try:
         payload = response.json()
     except ValueError:
-        payload = {'error': {'message': response.text or 'unknown error'}}
+        payload = {"error": {"message": response.text or "unknown error"}}
 
-    error_message = payload.get('error', {}).get('message', 'unknown error')
-    errors = payload.get('error', {}).get('errors', ())
+    error_message = payload.get("error", {}).get("message", "unknown error")
+    errors = payload.get("error", {}).get("errors", ())
 
-    message = u'{method} {url}: {error}'.format(
-        method=response.request.method,
-        url=response.request.url,
-        error=error_message)
+    message = "{method} {url}: {error}".format(
+        method=response.request.method, url=response.request.url, error=error_message
+    )
 
     exception = from_http_status(
-        response.status_code, message, errors=errors, response=response)
+        response.status_code, message, errors=errors, response=response
+    )
     return exception
 
 
@@ -434,10 +456,7 @@
     """
     if isinstance(rpc_exc, grpc.Call):
         return from_grpc_status(
-            rpc_exc.code(),
-            rpc_exc.details(),
-            errors=(rpc_exc,),
-            response=rpc_exc)
+            rpc_exc.code(), rpc_exc.details(), errors=(rpc_exc,), response=rpc_exc
+        )
     else:
-        return GoogleAPICallError(
-            str(rpc_exc), errors=(rpc_exc,), response=rpc_exc)
+        return GoogleAPICallError(str(rpc_exc), errors=(rpc_exc,), response=rpc_exc)
diff --git a/google/api_core/future/__init__.py b/google/api_core/future/__init__.py
index 8c75da7..3768b2c 100644
--- a/google/api_core/future/__init__.py
+++ b/google/api_core/future/__init__.py
@@ -16,6 +16,4 @@
 
 from google.api_core.future.base import Future
 
-__all__ = [
-    'Future',
-]
+__all__ = ["Future"]
diff --git a/google/api_core/future/_helpers.py b/google/api_core/future/_helpers.py
index 2f0136a..9e88ca9 100644
--- a/google/api_core/future/_helpers.py
+++ b/google/api_core/future/_helpers.py
@@ -36,4 +36,4 @@
     try:
         return callback(*args, **kwargs)
     except Exception:
-        _LOGGER.exception('Error while executing Future callback.')
+        _LOGGER.exception("Error while executing Future callback.")
diff --git a/google/api_core/future/polling.py b/google/api_core/future/polling.py
index 01adc21..5c16c49 100644
--- a/google/api_core/future/polling.py
+++ b/google/api_core/future/polling.py
@@ -25,6 +25,7 @@
 
 class _OperationNotComplete(Exception):
     """Private exception used for polling via retry."""
+
     pass
 
 
@@ -52,6 +53,7 @@
             is polled. Regardless of the retry's ``deadline``, it will be
             overridden by the ``timeout`` argument to :meth:`result`.
     """
+
     def __init__(self, retry=DEFAULT_RETRY):
         super(PollingFuture, self).__init__()
         self._retry = retry
@@ -99,8 +101,8 @@
             retry_(self._done_or_raise)()
         except exceptions.RetryError:
             raise concurrent.futures.TimeoutError(
-                'Operation did not complete within the designated '
-                'timeout.')
+                "Operation did not complete within the designated " "timeout."
+            )
 
     def result(self, timeout=None):
         """Get the result of the operation, blocking if necessary.
@@ -160,7 +162,8 @@
             # The polling thread will exit on its own as soon as the operation
             # is done.
             self._polling_thread = _helpers.start_daemon_thread(
-                target=self._blocking_poll)
+                target=self._blocking_poll
+            )
 
     def _invoke_callbacks(self, *args, **kwargs):
         """Invoke all done callbacks."""
diff --git a/google/api_core/gapic_v1/__init__.py b/google/api_core/gapic_v1/__init__.py
index 88270d8..e7a7a68 100644
--- a/google/api_core/gapic_v1/__init__.py
+++ b/google/api_core/gapic_v1/__init__.py
@@ -17,9 +17,4 @@
 from google.api_core.gapic_v1 import method
 from google.api_core.gapic_v1 import routing_header
 
-__all__ = [
-    'client_info',
-    'config',
-    'method',
-    'routing_header',
-]
+__all__ = ["client_info", "config", "method", "routing_header"]
diff --git a/google/api_core/gapic_v1/client_info.py b/google/api_core/gapic_v1/client_info.py
index 7feeaf1..66a4e4c 100644
--- a/google/api_core/gapic_v1/client_info.py
+++ b/google/api_core/gapic_v1/client_info.py
@@ -23,14 +23,14 @@
 import pkg_resources
 
 _PY_VERSION = platform.python_version()
-_API_CORE_VERSION = pkg_resources.get_distribution('google-api-core').version
+_API_CORE_VERSION = pkg_resources.get_distribution("google-api-core").version
 
 try:
-    _GRPC_VERSION = pkg_resources.get_distribution('grpcio').version
+    _GRPC_VERSION = pkg_resources.get_distribution("grpcio").version
 except pkg_resources.DistributionNotFound:  # pragma: NO COVER
     _GRPC_VERSION = None
 
-METRICS_METADATA_KEY = 'x-goog-api-client'
+METRICS_METADATA_KEY = "x-goog-api-client"
 
 
 class ClientInfo(object):
@@ -52,13 +52,15 @@
             by gapic or if additional functionality was built on top of
             a gapic client library.
     """
+
     def __init__(
-            self,
-            python_version=_PY_VERSION,
-            grpc_version=_GRPC_VERSION,
-            api_core_version=_API_CORE_VERSION,
-            gapic_version=None,
-            client_library_version=None):
+        self,
+        python_version=_PY_VERSION,
+        grpc_version=_GRPC_VERSION,
+        api_core_version=_API_CORE_VERSION,
+        gapic_version=None,
+        client_library_version=None,
+    ):
         self.python_version = python_version
         self.grpc_version = grpc_version
         self.api_core_version = api_core_version
@@ -69,18 +71,18 @@
         """Returns the user-agent string for this client info."""
         # Note: the order here is important as the internal metrics system
         # expects these items to be in specific locations.
-        ua = 'gl-python/{python_version} '
+        ua = "gl-python/{python_version} "
 
         if self.grpc_version is not None:
-            ua += 'grpc/{grpc_version} '
+            ua += "grpc/{grpc_version} "
 
-        ua += 'gax/{api_core_version} '
+        ua += "gax/{api_core_version} "
 
         if self.gapic_version is not None:
-            ua += 'gapic/{gapic_version} '
+            ua += "gapic/{gapic_version} "
 
         if self.client_library_version is not None:
-            ua += 'gccl/{client_library_version} '
+            ua += "gccl/{client_library_version} "
 
         return ua.format(**self.__dict__).strip()
 
diff --git a/google/api_core/gapic_v1/config.py b/google/api_core/gapic_v1/config.py
index e8bb47b..3a3eb15 100644
--- a/google/api_core/gapic_v1/config.py
+++ b/google/api_core/gapic_v1/config.py
@@ -42,8 +42,7 @@
         :func:`type`: The appropriate subclass of
             :class:`google.api_core.exceptions.GoogleAPICallError`.
     """
-    return exceptions.exception_class_for_grpc_status(
-        getattr(grpc.StatusCode, name))
+    return exceptions.exception_class_for_grpc_status(getattr(grpc.StatusCode, name))
 
 
 def _retry_from_retry_config(retry_params, retry_codes):
@@ -69,15 +68,15 @@
         google.api_core.retry.Retry: The default retry object for the method.
     """
     exception_classes = [
-        _exception_class_for_grpc_status_name(code) for code in retry_codes]
+        _exception_class_for_grpc_status_name(code) for code in retry_codes
+    ]
     return retry.Retry(
         retry.if_exception_type(*exception_classes),
-        initial=(
-            retry_params['initial_retry_delay_millis'] / _MILLIS_PER_SECOND),
-        maximum=(
-            retry_params['max_retry_delay_millis'] / _MILLIS_PER_SECOND),
-        multiplier=retry_params['retry_delay_multiplier'],
-        deadline=retry_params['total_timeout_millis'] / _MILLIS_PER_SECOND)
+        initial=(retry_params["initial_retry_delay_millis"] / _MILLIS_PER_SECOND),
+        maximum=(retry_params["max_retry_delay_millis"] / _MILLIS_PER_SECOND),
+        multiplier=retry_params["retry_delay_multiplier"],
+        deadline=retry_params["total_timeout_millis"] / _MILLIS_PER_SECOND,
+    )
 
 
 def _timeout_from_retry_config(retry_params):
@@ -101,16 +100,14 @@
             the method.
     """
     return timeout.ExponentialTimeout(
-        initial=(
-            retry_params['initial_rpc_timeout_millis'] / _MILLIS_PER_SECOND),
-        maximum=(
-            retry_params['max_rpc_timeout_millis'] / _MILLIS_PER_SECOND),
-        multiplier=retry_params['rpc_timeout_multiplier'],
-        deadline=(
-            retry_params['total_timeout_millis'] / _MILLIS_PER_SECOND))
+        initial=(retry_params["initial_rpc_timeout_millis"] / _MILLIS_PER_SECOND),
+        maximum=(retry_params["max_rpc_timeout_millis"] / _MILLIS_PER_SECOND),
+        multiplier=retry_params["rpc_timeout_multiplier"],
+        deadline=(retry_params["total_timeout_millis"] / _MILLIS_PER_SECOND),
+    )
 
 
-MethodConfig = collections.namedtuple('MethodConfig', ['retry', 'timeout'])
+MethodConfig = collections.namedtuple("MethodConfig", ["retry", "timeout"])
 
 
 def parse_method_configs(interface_config):
@@ -131,15 +128,15 @@
     # Grab all the retry codes
     retry_codes_map = {
         name: retry_codes
-        for name, retry_codes
-        in six.iteritems(interface_config.get('retry_codes', {}))
+        for name, retry_codes in six.iteritems(interface_config.get("retry_codes", {}))
     }
 
     # Grab all of the retry params
     retry_params_map = {
         name: retry_params
-        for name, retry_params
-        in six.iteritems(interface_config.get('retry_params', {}))
+        for name, retry_params in six.iteritems(
+            interface_config.get("retry_params", {})
+        )
     }
 
     # Iterate through all the API methods and create a flat MethodConfig
@@ -147,23 +144,24 @@
     method_configs = {}
 
     for method_name, method_params in six.iteritems(
-            interface_config.get('methods', {})):
-        retry_params_name = method_params.get('retry_params_name')
+        interface_config.get("methods", {})
+    ):
+        retry_params_name = method_params.get("retry_params_name")
 
         if retry_params_name is not None:
             retry_params = retry_params_map[retry_params_name]
             retry_ = _retry_from_retry_config(
-                retry_params,
-                retry_codes_map[method_params['retry_codes_name']])
+                retry_params, retry_codes_map[method_params["retry_codes_name"]]
+            )
             timeout_ = _timeout_from_retry_config(retry_params)
 
         # No retry config, so this is a non-retryable method.
         else:
             retry_ = None
             timeout_ = timeout.ConstantTimeout(
-                method_params['timeout_millis'] / _MILLIS_PER_SECOND)
+                method_params["timeout_millis"] / _MILLIS_PER_SECOND
+            )
 
-        method_configs[method_name] = MethodConfig(
-            retry=retry_, timeout=timeout_)
+        method_configs[method_name] = MethodConfig(retry=retry_, timeout=timeout_)
 
     return method_configs
diff --git a/google/api_core/gapic_v1/method.py b/google/api_core/gapic_v1/method.py
index 9c4cf03..49982c0 100644
--- a/google/api_core/gapic_v1/method.py
+++ b/google/api_core/gapic_v1/method.py
@@ -69,8 +69,11 @@
         # a non-default retry is specified, make sure the timeout's deadline
         # matches the retry's. This handles the case where the user leaves
         # the timeout default but specifies a lower deadline via the retry.
-        if (retry and retry is not DEFAULT
-                and isinstance(default_timeout, timeout.ExponentialTimeout)):
+        if (
+            retry
+            and retry is not DEFAULT
+            and isinstance(default_timeout, timeout.ExponentialTimeout)
+        ):
             return default_timeout.with_deadline(retry._deadline)
         else:
             return default_timeout
@@ -111,13 +114,14 @@
         # extract the retry and timeout params.
         timeout_ = _determine_timeout(
             self._timeout,
-            kwargs.pop('timeout', self._timeout),
+            kwargs.pop("timeout", self._timeout),
             # Use the invocation-specified retry only for this, as we only
             # want to adjust the timeout deadline if the *user* specified
             # a different retry.
-            kwargs.get('retry', None))
+            kwargs.get("retry", None),
+        )
 
-        retry = kwargs.pop('retry', self._retry)
+        retry = kwargs.pop("retry", self._retry)
 
         if retry is DEFAULT:
             retry = self._retry
@@ -127,21 +131,24 @@
 
         # Add the user agent metadata to the call.
         if self._metadata is not None:
-            metadata = kwargs.get('metadata', [])
+            metadata = kwargs.get("metadata", [])
             # Due to the nature of invocation, None should be treated the same
             # as not specified.
             if metadata is None:
                 metadata = []
             metadata = list(metadata)
             metadata.extend(self._metadata)
-            kwargs['metadata'] = metadata
+            kwargs["metadata"] = metadata
 
         return wrapped_func(*args, **kwargs)
 
 
 def wrap_method(
-        func, default_retry=None, default_timeout=None,
-        client_info=client_info.DEFAULT_CLIENT_INFO):
+    func,
+    default_retry=None,
+    default_timeout=None,
+    client_info=client_info.DEFAULT_CLIENT_INFO,
+):
     """Wrap an RPC method with common behavior.
 
     This applies common error wrapping, retry, and timeout behavior to a function.
@@ -230,5 +237,6 @@
 
     return general_helpers.wraps(func)(
         _GapicCallable(
-            func, default_retry, default_timeout,
-            metadata=user_agent_metadata))
+            func, default_retry, default_timeout, metadata=user_agent_metadata
+        )
+    )
diff --git a/google/api_core/gapic_v1/routing_header.py b/google/api_core/gapic_v1/routing_header.py
index fc88bb6..3fb12a6 100644
--- a/google/api_core/gapic_v1/routing_header.py
+++ b/google/api_core/gapic_v1/routing_header.py
@@ -24,7 +24,7 @@
 
 from six.moves.urllib.parse import urlencode
 
-ROUTING_METADATA_KEY = 'x-goog-request-params'
+ROUTING_METADATA_KEY = "x-goog-request-params"
 
 
 def to_routing_header(params):
@@ -39,11 +39,12 @@
     """
     if sys.version_info[0] < 3:
         # Python 2 does not have the "safe" parameter for urlencode.
-        return urlencode(params).replace('%2F', '/')
+        return urlencode(params).replace("%2F", "/")
     return urlencode(
         params,
         # Per Google API policy (go/api-url-encoding), / is not encoded.
-        safe='/')
+        safe="/",
+    )
 
 
 def to_grpc_metadata(params):
diff --git a/google/api_core/general_helpers.py b/google/api_core/general_helpers.py
index 2c23a5a..5661663 100644
--- a/google/api_core/general_helpers.py
+++ b/google/api_core/general_helpers.py
@@ -21,7 +21,7 @@
 
 # functools.partial objects lack several attributes present on real function
 # objects. In Python 2 wraps fails on this so use a restricted set instead.
-_PARTIAL_VALID_ASSIGNMENTS = ('__doc__',)
+_PARTIAL_VALID_ASSIGNMENTS = ("__doc__",)
 
 
 def wraps(wrapped):
diff --git a/google/api_core/grpc_helpers.py b/google/api_core/grpc_helpers.py
index b4ac9e0..4d63beb 100644
--- a/google/api_core/grpc_helpers.py
+++ b/google/api_core/grpc_helpers.py
@@ -28,15 +28,13 @@
 
 try:
     import grpc_gcp
+
     HAS_GRPC_GCP = True
 except ImportError:
     HAS_GRPC_GCP = False
 
 # The list of gRPC Callable interfaces that return iterators.
-_STREAM_WRAP_CLASSES = (
-    grpc.UnaryStreamMultiCallable,
-    grpc.StreamStreamMultiCallable,
-)
+_STREAM_WRAP_CLASSES = (grpc.UnaryStreamMultiCallable, grpc.StreamStreamMultiCallable)
 
 
 def _patch_callable_name(callable_):
@@ -45,7 +43,7 @@
     gRPC callables lack the ``__name__`` attribute which causes
     :func:`functools.wraps` to error. This adds the attribute if needed.
     """
-    if not hasattr(callable_, '__name__'):
+    if not hasattr(callable_, "__name__"):
         callable_.__name__ = callable_.__class__.__name__
 
 
@@ -154,11 +152,9 @@
         return _wrap_unary_errors(callable_)
 
 
-def create_channel(target,
-                   credentials=None,
-                   scopes=None,
-                   ssl_credentials=None,
-                   **kwargs):
+def create_channel(
+    target, credentials=None, scopes=None, ssl_credentials=None, **kwargs
+):
     """Create a secure channel with credentials.
 
     Args:
@@ -181,13 +177,15 @@
         credentials, _ = google.auth.default(scopes=scopes)
     else:
         credentials = google.auth.credentials.with_scopes_if_required(
-            credentials, scopes)
+            credentials, scopes
+        )
 
     request = google.auth.transport.requests.Request()
 
     # Create the metadata plugin for inserting the authorization header.
     metadata_plugin = google.auth.transport.grpc.AuthMetadataPlugin(
-        credentials, request)
+        credentials, request
+    )
 
     # Create a set of grpc.CallCredentials using the metadata plugin.
     google_auth_credentials = grpc.metadata_call_credentials(metadata_plugin)
@@ -197,7 +195,8 @@
 
     # Combine the ssl credentials and the authorization credentials.
     composite_credentials = grpc.composite_channel_credentials(
-        ssl_credentials, google_auth_credentials)
+        ssl_credentials, google_auth_credentials
+    )
 
     if HAS_GRPC_GCP:
         # If grpc_gcp module is available use grpc_gcp.secure_channel,
@@ -208,10 +207,10 @@
 
 
 _MethodCall = collections.namedtuple(
-    '_MethodCall', ('request', 'timeout', 'metadata', 'credentials'))
+    "_MethodCall", ("request", "timeout", "metadata", "credentials")
+)
 
-_ChannelRequest = collections.namedtuple(
-    '_ChannelRequest', ('method', 'request'))
+_ChannelRequest = collections.namedtuple("_ChannelRequest", ("method", "request"))
 
 
 class _CallableStub(object):
@@ -238,10 +237,8 @@
         request, timeout, metadata, and credentials."""
 
     def __call__(self, request, timeout=None, metadata=None, credentials=None):
-        self._channel.requests.append(
-            _ChannelRequest(self._method, request))
-        self.calls.append(
-            _MethodCall(request, timeout, metadata, credentials))
+        self._channel.requests.append(_ChannelRequest(self._method, request))
+        self.calls.append(_MethodCall(request, timeout, metadata, credentials))
         self.requests.append(request)
 
         response = self.response
@@ -250,8 +247,9 @@
                 response = next(self.responses)
             else:
                 raise ValueError(
-                    '{method}.response and {method}.responses are mutually '
-                    'exclusive.'.format(method=self._method))
+                    "{method}.response and {method}.responses are mutually "
+                    "exclusive.".format(method=self._method)
+                )
 
         if callable(response):
             return response(request)
@@ -262,8 +260,7 @@
         if response is not None:
             return response
 
-        raise ValueError(
-            'Method stub for "{}" has no response.'.format(self._method))
+        raise ValueError('Method stub for "{}" has no response.'.format(self._method))
 
 
 def _simplify_method_name(method):
@@ -279,7 +276,7 @@
     Returns:
         str: The simplified name of the method.
     """
-    return method.rsplit('/', 1).pop()
+    return method.rsplit("/", 1).pop()
 
 
 class ChannelStub(grpc.Channel):
@@ -356,27 +353,21 @@
         except KeyError:
             raise AttributeError
 
-    def unary_unary(
-            self, method,
-            request_serializer=None, response_deserializer=None):
+    def unary_unary(self, method, request_serializer=None, response_deserializer=None):
         """grpc.Channel.unary_unary implementation."""
         return self._stub_for_method(method)
 
-    def unary_stream(
-            self, method,
-            request_serializer=None, response_deserializer=None):
+    def unary_stream(self, method, request_serializer=None, response_deserializer=None):
         """grpc.Channel.unary_stream implementation."""
         return self._stub_for_method(method)
 
-    def stream_unary(
-            self, method,
-            request_serializer=None, response_deserializer=None):
+    def stream_unary(self, method, request_serializer=None, response_deserializer=None):
         """grpc.Channel.stream_unary implementation."""
         return self._stub_for_method(method)
 
     def stream_stream(
-            self, method,
-            request_serializer=None, response_deserializer=None):
+        self, method, request_serializer=None, response_deserializer=None
+    ):
         """grpc.Channel.stream_stream implementation."""
         return self._stub_for_method(method)
 
diff --git a/google/api_core/operation.py b/google/api_core/operation.py
index a97a137..4147c7b 100644
--- a/google/api_core/operation.py
+++ b/google/api_core/operation.py
@@ -68,8 +68,14 @@
     """
 
     def __init__(
-            self, operation, refresh, cancel,
-            result_type, metadata_type=None, retry=polling.DEFAULT_RETRY):
+        self,
+        operation,
+        refresh,
+        cancel,
+        result_type,
+        metadata_type=None,
+        retry=polling.DEFAULT_RETRY,
+    ):
         super(Operation, self).__init__(retry=retry)
         self._operation = operation
         self._refresh = refresh
@@ -88,11 +94,12 @@
     @property
     def metadata(self):
         """google.protobuf.Message: the current operation metadata."""
-        if not self._operation.HasField('metadata'):
+        if not self._operation.HasField("metadata"):
             return None
 
         return protobuf_helpers.from_any_pb(
-            self._metadata_type, self._operation.metadata)
+            self._metadata_type, self._operation.metadata
+        )
 
     def _set_result_from_operation(self):
         """Set the result or exception from the operation if it is complete."""
@@ -107,20 +114,23 @@
             if not self._operation.done or self._result_set:
                 return
 
-            if self._operation.HasField('response'):
+            if self._operation.HasField("response"):
                 response = protobuf_helpers.from_any_pb(
-                    self._result_type, self._operation.response)
+                    self._result_type, self._operation.response
+                )
                 self.set_result(response)
-            elif self._operation.HasField('error'):
+            elif self._operation.HasField("error"):
                 exception = exceptions.GoogleAPICallError(
                     self._operation.error.message,
                     errors=(self._operation.error,),
-                    response=self._operation)
+                    response=self._operation,
+                )
                 self.set_exception(exception)
             else:
                 exception = exceptions.GoogleAPICallError(
-                    'Unexpected state: Long-running operation had neither '
-                    'response nor error set.')
+                    "Unexpected state: Long-running operation had neither "
+                    "response nor error set."
+                )
                 self.set_exception(exception)
 
     def _refresh_and_update(self):
@@ -156,8 +166,10 @@
     def cancelled(self):
         """True if the operation was cancelled."""
         self._refresh_and_update()
-        return (self._operation.HasField('error') and
-                self._operation.error.code == code_pb2.CANCELLED)
+        return (
+            self._operation.HasField("error")
+            and self._operation.error.code == code_pb2.CANCELLED
+        )
 
 
 def _refresh_http(api_request, operation_name):
@@ -172,10 +184,9 @@
     Returns:
         google.longrunning.operations_pb2.Operation: The operation.
     """
-    path = 'operations/{}'.format(operation_name)
-    api_response = api_request(method='GET', path=path)
-    return json_format.ParseDict(
-        api_response, operations_pb2.Operation())
+    path = "operations/{}".format(operation_name)
+    api_response = api_request(method="GET", path=path)
+    return json_format.ParseDict(api_response, operations_pb2.Operation())
 
 
 def _cancel_http(api_request, operation_name):
@@ -187,8 +198,8 @@
             :meth:`google.cloud._http.Connection.api_request`.
         operation_name (str): The name of the operation.
     """
-    path = 'operations/{}:cancel'.format(operation_name)
-    api_request(method='POST', path=path)
+    path = "operations/{}:cancel".format(operation_name)
+    api_request(method="POST", path=path)
 
 
 def from_http_json(operation, api_request, result_type, **kwargs):
@@ -212,12 +223,9 @@
         ~.api_core.operation.Operation: The operation future to track the given
             operation.
     """
-    operation_proto = json_format.ParseDict(
-        operation, operations_pb2.Operation())
-    refresh = functools.partial(
-        _refresh_http, api_request, operation_proto.name)
-    cancel = functools.partial(
-        _cancel_http, api_request, operation_proto.name)
+    operation_proto = json_format.ParseDict(operation, operations_pb2.Operation())
+    refresh = functools.partial(_refresh_http, api_request, operation_proto.name)
+    cancel = functools.partial(_cancel_http, api_request, operation_proto.name)
     return Operation(operation_proto, refresh, cancel, result_type, **kwargs)
 
 
@@ -269,10 +277,8 @@
         ~.api_core.operation.Operation: The operation future to track the given
             operation.
     """
-    refresh = functools.partial(
-        _refresh_grpc, operations_stub, operation.name)
-    cancel = functools.partial(
-        _cancel_grpc, operations_stub, operation.name)
+    refresh = functools.partial(_refresh_grpc, operations_stub, operation.name)
+    cancel = functools.partial(_cancel_grpc, operations_stub, operation.name)
     return Operation(operation, refresh, cancel, result_type, **kwargs)
 
 
@@ -297,8 +303,6 @@
         ~.api_core.operation.Operation: The operation future to track the given
             operation.
     """
-    refresh = functools.partial(
-        operations_client.get_operation, operation.name)
-    cancel = functools.partial(
-        operations_client.cancel_operation, operation.name)
+    refresh = functools.partial(operations_client.get_operation, operation.name)
+    cancel = functools.partial(operations_client.cancel_operation, operation.name)
     return Operation(operation, refresh, cancel, result_type, **kwargs)
diff --git a/google/api_core/operations_v1/__init__.py b/google/api_core/operations_v1/__init__.py
index 10478d1..f054956 100644
--- a/google/api_core/operations_v1/__init__.py
+++ b/google/api_core/operations_v1/__init__.py
@@ -16,6 +16,4 @@
 
 from google.api_core.operations_v1.operations_client import OperationsClient
 
-__all__ = [
-    'OperationsClient'
-]
+__all__ = ["OperationsClient"]
diff --git a/google/api_core/operations_v1/operations_client.py b/google/api_core/operations_v1/operations_client.py
index 3af6996..cd2923b 100644
--- a/google/api_core/operations_v1/operations_client.py
+++ b/google/api_core/operations_v1/operations_client.py
@@ -61,34 +61,38 @@
         # Create all wrapped methods using the interface configuration.
         # The interface config contains all of the default settings for retry
         # and timeout for each RPC method.
-        interfaces = client_config['interfaces']
-        interface_config = interfaces['google.longrunning.Operations']
+        interfaces = client_config["interfaces"]
+        interface_config = interfaces["google.longrunning.Operations"]
         method_configs = gapic_v1.config.parse_method_configs(interface_config)
 
         self._get_operation = gapic_v1.method.wrap_method(
             self.operations_stub.GetOperation,
-            default_retry=method_configs['GetOperation'].retry,
-            default_timeout=method_configs['GetOperation'].timeout)
+            default_retry=method_configs["GetOperation"].retry,
+            default_timeout=method_configs["GetOperation"].timeout,
+        )
 
         self._list_operations = gapic_v1.method.wrap_method(
             self.operations_stub.ListOperations,
-            default_retry=method_configs['ListOperations'].retry,
-            default_timeout=method_configs['ListOperations'].timeout)
+            default_retry=method_configs["ListOperations"].retry,
+            default_timeout=method_configs["ListOperations"].timeout,
+        )
 
         self._cancel_operation = gapic_v1.method.wrap_method(
             self.operations_stub.CancelOperation,
-            default_retry=method_configs['CancelOperation'].retry,
-            default_timeout=method_configs['CancelOperation'].timeout)
+            default_retry=method_configs["CancelOperation"].retry,
+            default_timeout=method_configs["CancelOperation"].timeout,
+        )
 
         self._delete_operation = gapic_v1.method.wrap_method(
             self.operations_stub.DeleteOperation,
-            default_retry=method_configs['DeleteOperation'].retry,
-            default_timeout=method_configs['DeleteOperation'].timeout)
+            default_retry=method_configs["DeleteOperation"].retry,
+            default_timeout=method_configs["DeleteOperation"].timeout,
+        )
 
     # Service calls
     def get_operation(
-            self, name,
-            retry=gapic_v1.method.DEFAULT, timeout=gapic_v1.method.DEFAULT):
+        self, name, retry=gapic_v1.method.DEFAULT, timeout=gapic_v1.method.DEFAULT
+    ):
         """Gets the latest state of a long-running operation.
 
         Clients can use this method to poll the operation result at intervals
@@ -127,8 +131,12 @@
         return self._get_operation(request, retry=retry, timeout=timeout)
 
     def list_operations(
-            self, name, filter_,
-            retry=gapic_v1.method.DEFAULT, timeout=gapic_v1.method.DEFAULT):
+        self,
+        name,
+        filter_,
+        retry=gapic_v1.method.DEFAULT,
+        timeout=gapic_v1.method.DEFAULT,
+    ):
         """
         Lists operations that match the specified filter in the request.
 
@@ -177,26 +185,25 @@
                 subclass will be raised.
         """
         # Create the request object.
-        request = operations_pb2.ListOperationsRequest(
-            name=name, filter=filter_)
+        request = operations_pb2.ListOperationsRequest(name=name, filter=filter_)
 
         # Create the method used to fetch pages
-        method = functools.partial(
-            self._list_operations, retry=retry, timeout=timeout)
+        method = functools.partial(self._list_operations, retry=retry, timeout=timeout)
 
         iterator = page_iterator.GRPCIterator(
             client=None,
             method=method,
             request=request,
-            items_field='operations',
-            request_token_field='page_token',
-            response_token_field='next_page_token')
+            items_field="operations",
+            request_token_field="page_token",
+            response_token_field="next_page_token",
+        )
 
         return iterator
 
     def cancel_operation(
-            self, name,
-            retry=gapic_v1.method.DEFAULT, timeout=gapic_v1.method.DEFAULT):
+        self, name, retry=gapic_v1.method.DEFAULT, timeout=gapic_v1.method.DEFAULT
+    ):
         """Starts asynchronous cancellation on a long-running operation.
 
         The server makes a best effort to cancel the operation, but success is
@@ -241,8 +248,8 @@
         self._cancel_operation(request, retry=retry, timeout=timeout)
 
     def delete_operation(
-            self, name,
-            retry=gapic_v1.method.DEFAULT, timeout=gapic_v1.method.DEFAULT):
+        self, name, retry=gapic_v1.method.DEFAULT, timeout=gapic_v1.method.DEFAULT
+    ):
         """Deletes a long-running operation.
 
         This method indicates that the client is no longer interested in the
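For orientation, a minimal usage sketch of the wrapped operations client above. This is not part of the diff: the channel endpoint is a placeholder, and a real service needs an authorized channel.

    import grpc

    from google.api_core import operations_v1

    # Placeholder channel; production code would use a secure, authorized channel.
    channel = grpc.insecure_channel("localhost:8080")
    client = operations_v1.OperationsClient(channel)

    # Poll a single long-running operation by name.
    operation = client.get_operation("operations/sample-operation")

    # list_operations returns a page_iterator.Iterator over Operation messages.
    for op in client.list_operations("operations", filter_=""):
        print(op.name)

    # Best-effort cancellation, then drop the server-side record of the operation.
    client.cancel_operation("operations/sample-operation")
    client.delete_operation("operations/sample-operation")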
diff --git a/google/api_core/operations_v1/operations_client_config.py b/google/api_core/operations_v1/operations_client_config.py
index a3849ee..6cf9575 100644
--- a/google/api_core/operations_v1/operations_client_config.py
+++ b/google/api_core/operations_v1/operations_client_config.py
@@ -18,11 +18,8 @@
     "interfaces": {
         "google.longrunning.Operations": {
             "retry_codes": {
-                "idempotent": [
-                    "DEADLINE_EXCEEDED",
-                    "UNAVAILABLE"
-                ],
-                "non_idempotent": []
+                "idempotent": ["DEADLINE_EXCEEDED", "UNAVAILABLE"],
+                "non_idempotent": [],
             },
             "retry_params": {
                 "default": {
@@ -32,31 +29,31 @@
                     "initial_rpc_timeout_millis": 20000,
                     "rpc_timeout_multiplier": 1.0,
                     "max_rpc_timeout_millis": 600000,
-                    "total_timeout_millis": 600000
+                    "total_timeout_millis": 600000,
                 }
             },
             "methods": {
                 "GetOperation": {
                     "timeout_millis": 60000,
                     "retry_codes_name": "idempotent",
-                    "retry_params_name": "default"
+                    "retry_params_name": "default",
                 },
                 "ListOperations": {
                     "timeout_millis": 60000,
                     "retry_codes_name": "idempotent",
-                    "retry_params_name": "default"
+                    "retry_params_name": "default",
                 },
                 "CancelOperation": {
                     "timeout_millis": 60000,
                     "retry_codes_name": "idempotent",
-                    "retry_params_name": "default"
+                    "retry_params_name": "default",
                 },
                 "DeleteOperation": {
                     "timeout_millis": 60000,
                     "retry_codes_name": "idempotent",
-                    "retry_params_name": "default"
-                }
-            }
+                    "retry_params_name": "default",
+                },
+            },
         }
     }
 }
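The config above is what gapic_v1.config.parse_method_configs consumes (see tests/unit/gapic/test_config.py later in this diff) to build the retry/timeout defaults passed to wrap_method. A rough sketch, assuming the module-level dict shown above is named config:

    from google.api_core.gapic_v1 import config
    from google.api_core.operations_v1 import operations_client_config

    interface_config = operations_client_config.config["interfaces"][
        "google.longrunning.Operations"
    ]
    method_configs = config.parse_method_configs(interface_config)

    # Each entry carries the retry and timeout defaults used by wrap_method.
    print(method_configs["GetOperation"].retry)
    print(method_configs["GetOperation"].timeout)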
diff --git a/google/api_core/page_iterator.py b/google/api_core/page_iterator.py
index 0cf03a6..3ac5904 100644
--- a/google/api_core/page_iterator.py
+++ b/google/api_core/page_iterator.py
@@ -154,8 +154,13 @@
         max_results (int): The maximum number of results to fetch.
     """
 
-    def __init__(self, client, item_to_value=_item_to_value_identity,
-                 page_token=None, max_results=None):
+    def __init__(
+        self,
+        client,
+        item_to_value=_item_to_value_identity,
+        page_token=None,
+        max_results=None,
+    ):
         self._started = False
         self.client = client
         """Optional[Any]: The client that created this iterator."""
@@ -190,7 +195,7 @@
             ValueError: If the iterator has already been started.
         """
         if self._started:
-            raise ValueError('Iterator has already started', self)
+            raise ValueError("Iterator has already started", self)
         self._started = True
         return self._page_iter(increment=True)
 
@@ -211,7 +216,7 @@
             ValueError: If the iterator has already been started.
         """
         if self._started:
-            raise ValueError('Iterator has already started', self)
+            raise ValueError("Iterator has already started", self)
         self._started = True
         return self._items_iter()
 
@@ -301,20 +306,29 @@
     .. autoattribute:: pages
     """
 
-    _DEFAULT_ITEMS_KEY = 'items'
-    _PAGE_TOKEN = 'pageToken'
-    _MAX_RESULTS = 'maxResults'
-    _NEXT_TOKEN = 'nextPageToken'
+    _DEFAULT_ITEMS_KEY = "items"
+    _PAGE_TOKEN = "pageToken"
+    _MAX_RESULTS = "maxResults"
+    _NEXT_TOKEN = "nextPageToken"
     _RESERVED_PARAMS = frozenset([_PAGE_TOKEN])
-    _HTTP_METHOD = 'GET'
+    _HTTP_METHOD = "GET"
 
-    def __init__(self, client, api_request, path, item_to_value,
-                 items_key=_DEFAULT_ITEMS_KEY,
-                 page_token=None, max_results=None, extra_params=None,
-                 page_start=_do_nothing_page_start, next_token=_NEXT_TOKEN):
+    def __init__(
+        self,
+        client,
+        api_request,
+        path,
+        item_to_value,
+        items_key=_DEFAULT_ITEMS_KEY,
+        page_token=None,
+        max_results=None,
+        extra_params=None,
+        page_start=_do_nothing_page_start,
+        next_token=_NEXT_TOKEN,
+    ):
         super(HTTPIterator, self).__init__(
-            client, item_to_value, page_token=page_token,
-            max_results=max_results)
+            client, item_to_value, page_token=page_token, max_results=max_results
+        )
         self.api_request = api_request
         self.path = path
         self._items_key = items_key
@@ -332,11 +346,9 @@
         Raises:
             ValueError: If a reserved parameter is used.
         """
-        reserved_in_use = self._RESERVED_PARAMS.intersection(
-            self.extra_params)
+        reserved_in_use = self._RESERVED_PARAMS.intersection(self.extra_params)
         if reserved_in_use:
-            raise ValueError('Using a reserved parameter',
-                             reserved_in_use)
+            raise ValueError("Using a reserved parameter", reserved_in_use)
 
     def _next_page(self):
         """Get the next page in the iterator.
@@ -394,18 +406,16 @@
             ValueError: If the HTTP method is not ``GET`` or ``POST``.
         """
         params = self._get_query_params()
-        if self._HTTP_METHOD == 'GET':
+        if self._HTTP_METHOD == "GET":
             return self.api_request(
-                method=self._HTTP_METHOD,
-                path=self.path,
-                query_params=params)
-        elif self._HTTP_METHOD == 'POST':
+                method=self._HTTP_METHOD, path=self.path, query_params=params
+            )
+        elif self._HTTP_METHOD == "POST":
             return self.api_request(
-                method=self._HTTP_METHOD,
-                path=self.path,
-                data=params)
+                method=self._HTTP_METHOD, path=self.path, data=params
+            )
         else:
-            raise ValueError('Unexpected HTTP method', self._HTTP_METHOD)
+            raise ValueError("Unexpected HTTP method", self._HTTP_METHOD)
 
 
 class _GAXIterator(Iterator):
@@ -425,8 +435,11 @@
 
     def __init__(self, client, page_iter, item_to_value, max_results=None):
         super(_GAXIterator, self).__init__(
-            client, item_to_value, page_token=page_iter.page_token,
-            max_results=max_results)
+            client,
+            item_to_value,
+            page_token=page_iter.page_token,
+            max_results=max_results,
+        )
         self._gax_page_iter = page_iter
 
     def _next_page(self):
@@ -474,21 +487,23 @@
     .. autoattribute:: pages
     """
 
-    _DEFAULT_REQUEST_TOKEN_FIELD = 'page_token'
-    _DEFAULT_RESPONSE_TOKEN_FIELD = 'next_page_token'
+    _DEFAULT_REQUEST_TOKEN_FIELD = "page_token"
+    _DEFAULT_RESPONSE_TOKEN_FIELD = "next_page_token"
 
     def __init__(
-            self,
-            client,
-            method,
-            request,
-            items_field,
-            item_to_value=_item_to_value_identity,
-            request_token_field=_DEFAULT_REQUEST_TOKEN_FIELD,
-            response_token_field=_DEFAULT_RESPONSE_TOKEN_FIELD,
-            max_results=None):
+        self,
+        client,
+        method,
+        request,
+        items_field,
+        item_to_value=_item_to_value_identity,
+        request_token_field=_DEFAULT_REQUEST_TOKEN_FIELD,
+        response_token_field=_DEFAULT_RESPONSE_TOKEN_FIELD,
+        max_results=None,
+    ):
         super(GRPCIterator, self).__init__(
-            client, item_to_value, max_results=max_results)
+            client, item_to_value, max_results=max_results
+        )
         self._method = method
         self._request = request
         self._items_field = items_field
@@ -506,8 +521,7 @@
             return None
 
         if self.next_page_token is not None:
-            setattr(
-                self._request, self._request_token_field, self.next_page_token)
+            setattr(self._request, self._request_token_field, self.next_page_token)
 
         response = self._method(self._request)
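For context on GRPCIterator, a self-contained sketch; fake_list_operations below is a made-up stand-in for the bound gRPC stub method that a real caller (for example list_operations earlier in this diff) would pass in.

    from google.api_core import page_iterator
    from google.longrunning import operations_pb2

    def fake_list_operations(request, **kwargs):
        # Stand-in for a gRPC stub call; a real stub would contact the service.
        return operations_pb2.ListOperationsResponse(
            operations=[operations_pb2.Operation(name="operations/1")]
        )

    iterator = page_iterator.GRPCIterator(
        client=None,
        method=fake_list_operations,
        request=operations_pb2.ListOperationsRequest(name="operations"),
        items_field="operations",
    )

    # Pages are fetched lazily; iteration stops once next_page_token comes back empty.
    assert [op.name for op in iterator] == ["operations/1"]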
 
diff --git a/google/api_core/path_template.py b/google/api_core/path_template.py
index 5d6609e..bb54935 100644
--- a/google/api_core/path_template.py
+++ b/google/api_core/path_template.py
@@ -41,7 +41,8 @@
 # - "{name=**}": a multi-segment wildcard named variable, for example
 #   "shelf/{name=**}"
 # - "{name=/path/*/**}": a multi-segment named variable with a sub-template.
-_VARIABLE_RE = re.compile(r"""
+_VARIABLE_RE = re.compile(
+    r"""
     (  # Capture the entire variable expression
         (?P<positional>\*\*?)  # Match & capture * and ** positional variables.
         |
@@ -52,11 +53,13 @@
             (?:=(?P<template>.+?))?
         }
     )
-    """, re.VERBOSE)
+    """,
+    re.VERBOSE,
+)
 
 # Segment expressions used for validating paths against a template.
-_SINGLE_SEGMENT_PATTERN = r'([^/]+)'
-_MULTI_SEGMENT_PATTERN = r'(.+)'
+_SINGLE_SEGMENT_PATTERN = r"([^/]+)"
+_MULTI_SEGMENT_PATTERN = r"(.+)"
 
 
 def _expand_variable_match(positional_vars, named_vars, match):
@@ -76,28 +79,26 @@
             template but not specified or if an unexpected template expression
             is encountered.
     """
-    positional = match.group('positional')
-    name = match.group('name')
+    positional = match.group("positional")
+    name = match.group("name")
     if name is not None:
         try:
             return six.text_type(named_vars[name])
         except KeyError:
             raise ValueError(
-                'Named variable \'{}\' not specified and needed by template '
-                '`{}` at position {}'.format(
-                    name, match.string, match.start()))
+                "Named variable '{}' not specified and needed by template "
+                "`{}` at position {}".format(name, match.string, match.start())
+            )
     elif positional is not None:
         try:
             return six.text_type(positional_vars.pop(0))
         except IndexError:
             raise ValueError(
-                'Positional variable not specified and needed by template '
-                '`{}` at position {}'.format(
-                    match.string, match.start()))
+                "Positional variable not specified and needed by template "
+                "`{}` at position {}".format(match.string, match.start())
+            )
     else:
-        raise ValueError(
-            'Unknown template expression {}'.format(
-                match.group(0)))
+        raise ValueError("Unknown template expression {}".format(match.group(0)))
 
 
 def expand(tmpl, *args, **kwargs):
@@ -140,24 +141,22 @@
     Raises:
         ValueError: If an unexpected template expression is encountered.
     """
-    positional = match.group('positional')
-    name = match.group('name')
-    template = match.group('template')
+    positional = match.group("positional")
+    name = match.group("name")
+    template = match.group("template")
     if name is not None:
         if not template:
             return _SINGLE_SEGMENT_PATTERN.format(name)
-        elif template == '**':
+        elif template == "**":
             return _MULTI_SEGMENT_PATTERN.format(name)
         else:
             return _generate_pattern_for_template(template)
-    elif positional == '*':
+    elif positional == "*":
         return _SINGLE_SEGMENT_PATTERN
-    elif positional == '**':
+    elif positional == "**":
         return _MULTI_SEGMENT_PATTERN
     else:
-        raise ValueError(
-            'Unknown template expression {}'.format(
-                match.group(0)))
+        raise ValueError("Unknown template expression {}".format(match.group(0)))
 
 
 def _generate_pattern_for_template(tmpl):
@@ -194,5 +193,5 @@
     Returns:
         bool: True if the path matches.
     """
-    pattern = _generate_pattern_for_template(tmpl) + '$'
+    pattern = _generate_pattern_for_template(tmpl) + "$"
     return True if re.match(pattern, path) is not None else False
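A quick sketch of the two public helpers built on the patterns above, expand and validate; the behavior follows directly from the single- and multi-segment patterns shown.

    from google.api_core import path_template

    # Named variables are substituted from keyword arguments.
    path = path_template.expand("shelves/{shelf}/books/{book}", shelf="s1", book="b2")
    assert path == "shelves/s1/books/b2"

    # validate() compiles the template into the segment patterns above and anchors it.
    assert path_template.validate("shelves/*/books/**", "shelves/s1/books/b2/pages/p3")
    assert not path_template.validate("shelves/{shelf}", "shelves/s1/books/b2")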
diff --git a/google/api_core/protobuf_helpers.py b/google/api_core/protobuf_helpers.py
index 78ab101..9762984 100644
--- a/google/api_core/protobuf_helpers.py
+++ b/google/api_core/protobuf_helpers.py
@@ -57,8 +57,10 @@
     msg = pb_type()
     if not any_pb.Unpack(msg):
         raise TypeError(
-            'Could not convert {} to {}'.format(
-                any_pb.__class__.__name__, pb_type.__name__))
+            "Could not convert {} to {}".format(
+                any_pb.__class__.__name__, pb_type.__name__
+            )
+        )
 
     return msg
 
@@ -78,9 +80,11 @@
 
     not_nones = [val for val in kwargs.values() if val is not None]
     if len(not_nones) > 1:
-        raise ValueError('Only one of {fields} should be set.'.format(
-            fields=', '.join(sorted(kwargs.keys())),
-        ))
+        raise ValueError(
+            "Only one of {fields} should be set.".format(
+                fields=", ".join(sorted(kwargs.keys()))
+            )
+        )
 
 
 def get_messages(module):
@@ -98,13 +102,12 @@
     answer = collections.OrderedDict()
     for name in dir(module):
         candidate = getattr(module, name)
-        if (inspect.isclass(candidate) and
-                issubclass(candidate, message.Message)):
+        if inspect.isclass(candidate) and issubclass(candidate, message.Message):
             answer[name] = candidate
     return answer
 
 
-def _resolve_subkeys(key, separator='.'):
+def _resolve_subkeys(key, separator="."):
     """Resolve a potentially nested key.
 
     If the key contains the ``separator`` (e.g. ``.``) then the key will be
@@ -169,8 +172,10 @@
         answer = msg_or_dict.get(key, default)
     else:
         raise TypeError(
-            'get() expected a dict or protobuf message, got {!r}.'.format(
-                type(msg_or_dict)))
+            "get() expected a dict or protobuf message, got {!r}.".format(
+                type(msg_or_dict)
+            )
+        )
 
     # If the object we got back is our sentinel, raise KeyError; this is
     # a "not found" case.
@@ -225,11 +230,12 @@
         TypeError: If ``msg_or_dict`` is not a Message or dictionary.
     """
     # Sanity check: Is our target object valid?
-    if (not isinstance(msg_or_dict,
-                       (collections_abc.MutableMapping, message.Message))):
+    if not isinstance(msg_or_dict, (collections_abc.MutableMapping, message.Message)):
         raise TypeError(
-            'set() expected a dict or protobuf message, got {!r}.'.format(
-                type(msg_or_dict)))
+            "set() expected a dict or protobuf message, got {!r}.".format(
+                type(msg_or_dict)
+            )
+        )
 
     # We may be setting a nested key. Resolve this.
     basekey, subkey = _resolve_subkeys(key)
@@ -302,15 +308,16 @@
 
     if type(original) != type(modified):
         raise ValueError(
-                'expected that both original and modified should be of the '
-                'same type, received "{!r}" and "{!r}".'.
-                format(type(original), type(modified)))
+            "expected that both original and modified should be of the "
+            'same type, received "{!r}" and "{!r}".'.format(
+                type(original), type(modified)
+            )
+        )
 
-    return field_mask_pb2.FieldMask(
-        paths=_field_mask_helper(original, modified))
+    return field_mask_pb2.FieldMask(paths=_field_mask_helper(original, modified))
 
 
-def _field_mask_helper(original, modified, current=''):
+def _field_mask_helper(original, modified, current=""):
     answer = []
 
     for name in original.DESCRIPTOR.fields_by_name:
@@ -328,8 +335,9 @@
                 elif not modified_val.ListFields():
                     answer.append(field_path)
                 else:
-                    answer.extend(_field_mask_helper(original_val,
-                                                     modified_val, field_path))
+                    answer.extend(
+                        _field_mask_helper(original_val, modified_val, field_path)
+                    )
         else:
             if original_val != modified_val:
                 answer.append(field_path)
@@ -340,7 +348,7 @@
 def _get_path(current, name):
     if not current:
         return name
-    return '%s.%s' % (current, name)
+    return "%s.%s" % (current, name)
 
 
 def _is_message(value):
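A small sketch of the helpers touched above. get() and set() accept either dicts or protobuf messages, and field_mask() (assumed to be the public name of the function whose return is reformatted above) lists the paths that differ between two messages of the same type; Operation is used here purely for illustration.

    from google.api_core import protobuf_helpers
    from google.longrunning import operations_pb2

    original = operations_pb2.Operation(name="operations/1")
    modified = operations_pb2.Operation(name="operations/1", done=True)

    # Dotted keys resolve nested fields on either dicts or protobuf messages.
    assert protobuf_helpers.get(modified, "name") == "operations/1"

    # Only the scalar field that differs shows up in the mask.
    mask = protobuf_helpers.field_mask(original, modified)
    assert list(mask.paths) == ["done"]

    # set() mirrors get(); copying the differing field empties the mask.
    protobuf_helpers.set(original, "done", True)
    assert list(protobuf_helpers.field_mask(original, modified).paths) == []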
diff --git a/google/api_core/retry.py b/google/api_core/retry.py
index 328e401..96d9f23 100644
--- a/google/api_core/retry.py
+++ b/google/api_core/retry.py
@@ -86,18 +86,20 @@
         Callable[Exception]: A predicate that returns True if the provided
             exception is of the given type(s).
     """
+
     def if_exception_type_predicate(exception):
         """Bound predicate for checking an exception type."""
         return isinstance(exception, exception_types)
+
     return if_exception_type_predicate
 
 
 # pylint: disable=invalid-name
 # Pylint sees this as a constant, but it is also an alias that should be
 # considered a function.
-if_transient_error = if_exception_type((
-    exceptions.InternalServerError,
-    exceptions.TooManyRequests))
+if_transient_error = if_exception_type(
+    (exceptions.InternalServerError, exceptions.TooManyRequests)
+)
 """A predicate that checks if an exception is a transient API error.
 
 The following server errors are considered transient:
@@ -111,8 +113,7 @@
 # pylint: enable=invalid-name
 
 
-def exponential_sleep_generator(
-        initial, maximum, multiplier=_DEFAULT_DELAY_MULTIPLIER):
+def exponential_sleep_generator(initial, maximum, multiplier=_DEFAULT_DELAY_MULTIPLIER):
     """Generates sleep intervals based on the exponential back-off algorithm.
 
     This implements the `Truncated Exponential Back-off`_ algorithm.
@@ -165,8 +166,9 @@
         Exception: If the target raises an exception that isn't retryable.
     """
     if deadline is not None:
-        deadline_datetime = (
-            datetime_helpers.utcnow() + datetime.timedelta(seconds=deadline))
+        deadline_datetime = datetime_helpers.utcnow() + datetime.timedelta(
+            seconds=deadline
+        )
     else:
         deadline_datetime = None
 
@@ -189,16 +191,20 @@
         if deadline_datetime is not None and deadline_datetime < now:
             six.raise_from(
                 exceptions.RetryError(
-                    'Deadline of {:.1f}s exceeded while calling {}'.format(
-                        deadline, target),
-                    last_exc),
-                last_exc)
+                    "Deadline of {:.1f}s exceeded while calling {}".format(
+                        deadline, target
+                    ),
+                    last_exc,
+                ),
+                last_exc,
+            )
 
-        _LOGGER.debug('Retrying due to {}, sleeping {:.1f}s ...'.format(
-            last_exc, sleep))
+        _LOGGER.debug(
+            "Retrying due to {}, sleeping {:.1f}s ...".format(last_exc, sleep)
+        )
         time.sleep(sleep)
 
-    raise ValueError('Sleep generator stopped yielding sleep values.')
+    raise ValueError("Sleep generator stopped yielding sleep values.")
 
 
 @six.python_2_unicode_compatible
@@ -220,13 +226,15 @@
         multiplier (float): The multiplier applied to the delay.
         deadline (float): How long to keep retrying in seconds.
     """
+
     def __init__(
-            self,
-            predicate=if_transient_error,
-            initial=_DEFAULT_INITIAL_DELAY,
-            maximum=_DEFAULT_MAXIMUM_DELAY,
-            multiplier=_DEFAULT_DELAY_MULTIPLIER,
-            deadline=_DEFAULT_DEADLINE):
+        self,
+        predicate=if_transient_error,
+        initial=_DEFAULT_INITIAL_DELAY,
+        maximum=_DEFAULT_MAXIMUM_DELAY,
+        multiplier=_DEFAULT_DELAY_MULTIPLIER,
+        deadline=_DEFAULT_DEADLINE,
+    ):
         self._predicate = predicate
         self._initial = initial
         self._multiplier = multiplier
@@ -246,12 +254,14 @@
             Callable: A callable that will invoke ``func`` with retry
                 behavior.
         """
+
         @general_helpers.wraps(func)
         def retry_wrapped_func(*args, **kwargs):
             """A wrapper that calls target function with retry."""
             target = functools.partial(func, *args, **kwargs)
             sleep_generator = exponential_sleep_generator(
-                self._initial, self._maximum, multiplier=self._multiplier)
+                self._initial, self._maximum, multiplier=self._multiplier
+            )
             return retry_target(
                 target,
                 self._predicate,
@@ -276,7 +286,8 @@
             initial=self._initial,
             maximum=self._maximum,
             multiplier=self._multiplier,
-            deadline=deadline)
+            deadline=deadline,
+        )
 
     def with_predicate(self, predicate):
         """Return a copy of this retry with the given predicate.
@@ -293,10 +304,10 @@
             initial=self._initial,
             maximum=self._maximum,
             multiplier=self._multiplier,
-            deadline=self._deadline)
+            deadline=self._deadline,
+        )
 
-    def with_delay(
-            self, initial=None, maximum=None, multiplier=None):
+    def with_delay(self, initial=None, maximum=None, multiplier=None):
         """Return a copy of this retry with the given delay options.
 
         Args:
@@ -313,11 +324,17 @@
             initial=initial if initial is not None else self._initial,
             maximum=maximum if maximum is not None else self._maximum,
+            multiplier=multiplier if multiplier is not None else self._multiplier,
-            deadline=self._deadline)
+            deadline=self._deadline,
+        )
 
     def __str__(self):
         return (
-            '<Retry predicate={}, initial={:.1f}, maximum={:.1f}, '
-            'multiplier={:.1f}, deadline={:.1f}>'.format(
-                self._predicate, self._initial, self._maximum,
-                self._multiplier, self._deadline))
+            "<Retry predicate={}, initial={:.1f}, maximum={:.1f}, "
+            "multiplier={:.1f}, deadline={:.1f}>".format(
+                self._predicate,
+                self._initial,
+                self._maximum,
+                self._multiplier,
+                self._deadline,
+            )
+        )
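The Retry changes above are formatting only. As a reminder of how the decorator is used, a minimal sketch; flaky_call is a made-up stand-in for an RPC.

    from google.api_core import exceptions, retry

    @retry.Retry(
        predicate=retry.if_exception_type(exceptions.ServiceUnavailable),
        initial=1.0,
        maximum=10.0,
        multiplier=2.0,
        deadline=60.0,
    )
    def flaky_call():
        # Stand-in for an RPC that may raise transient errors.
        return 42

    assert flaky_call() == 42

    # Adjusted copies come from the with_* helpers reformatted above.
    patient_retry = retry.Retry().with_deadline(600).with_delay(initial=0.5)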
diff --git a/google/api_core/timeout.py b/google/api_core/timeout.py
index 8339de0..17c1bea 100644
--- a/google/api_core/timeout.py
+++ b/google/api_core/timeout.py
@@ -80,6 +80,7 @@
             wrapped function. If `None`, the target function is expected to
             never timeout.
     """
+
     def __init__(self, timeout=None):
         self._timeout = timeout
 
@@ -93,15 +94,17 @@
         Returns:
             Callable: The wrapped function.
         """
+
         @general_helpers.wraps(func)
         def func_with_timeout(*args, **kwargs):
             """Wrapped function that adds timeout."""
-            kwargs['timeout'] = self._timeout
+            kwargs["timeout"] = self._timeout
             return func(*args, **kwargs)
+
         return func_with_timeout
 
     def __str__(self):
-        return '<ConstantTimeout timeout={:.1f}>'.format(self._timeout)
+        return "<ConstantTimeout timeout={:.1f}>".format(self._timeout)
 
 
 def _exponential_timeout_generator(initial, maximum, multiplier, deadline):
@@ -117,9 +120,9 @@
         float: A timeout value.
     """
     if deadline is not None:
-        deadline_datetime = (
-            datetime_helpers.utcnow() +
-            datetime.timedelta(seconds=deadline))
+        deadline_datetime = datetime_helpers.utcnow() + datetime.timedelta(
+            seconds=deadline
+        )
     else:
         deadline_datetime = datetime.datetime.max
 
@@ -132,7 +135,8 @@
             # The set maximum timeout.
             maximum,
             # The remaining time before the deadline is reached.
-            float((deadline_datetime - now).seconds))
+            float((deadline_datetime - now).seconds),
+        )
         timeout = timeout * multiplier
 
 
@@ -160,12 +164,14 @@
             :mod:`google.api_core.retry`. If ``None``, the timeouts will not
             be adjusted to accommodate an overall deadline.
     """
+
     def __init__(
-            self,
-            initial=_DEFAULT_INITIAL_TIMEOUT,
-            maximum=_DEFAULT_MAXIMUM_TIMEOUT,
-            multiplier=_DEFAULT_TIMEOUT_MULTIPLIER,
-            deadline=_DEFAULT_DEADLINE):
+        self,
+        initial=_DEFAULT_INITIAL_TIMEOUT,
+        maximum=_DEFAULT_MAXIMUM_TIMEOUT,
+        multiplier=_DEFAULT_TIMEOUT_MULTIPLIER,
+        deadline=_DEFAULT_DEADLINE,
+    ):
         self._initial = initial
         self._maximum = maximum
         self._multiplier = multiplier
@@ -184,7 +190,8 @@
             initial=self._initial,
             maximum=self._maximum,
             multiplier=self._multiplier,
-            deadline=deadline)
+            deadline=deadline,
+        )
 
     def __call__(self, func):
         """Apply the timeout decorator.
@@ -197,19 +204,21 @@
             Callable: The wrapped function.
         """
         timeouts = _exponential_timeout_generator(
-            self._initial, self._maximum, self._multiplier, self._deadline)
+            self._initial, self._maximum, self._multiplier, self._deadline
+        )
 
         @general_helpers.wraps(func)
         def func_with_timeout(*args, **kwargs):
             """Wrapped function that adds timeout."""
-            kwargs['timeout'] = next(timeouts)
+            kwargs["timeout"] = next(timeouts)
             return func(*args, **kwargs)
 
         return func_with_timeout
 
     def __str__(self):
         return (
-            '<ExponentialTimeout initial={:.1f}, maximum={:.1f}, '
-            'multiplier={:.1f}, deadline={:.1f}>'.format(
-                self._initial, self._maximum, self._multiplier,
-                self._deadline))
+            "<ExponentialTimeout initial={:.1f}, maximum={:.1f}, "
+            "multiplier={:.1f}, deadline={:.1f}>".format(
+                self._initial, self._maximum, self._multiplier, self._deadline
+            )
+        )
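Likewise for the timeout decorators: the wrapped callable simply receives the computed value as its timeout keyword argument. A short sketch:

    from google.api_core.timeout import ConstantTimeout, ExponentialTimeout

    @ExponentialTimeout(initial=1.0, maximum=10.0, multiplier=2.0, deadline=60.0)
    def call_api(timeout=None):
        # The decorator injects the current timeout on every invocation.
        return timeout

    assert call_api() == 1.0  # first call
    assert call_api() == 2.0  # doubled, capped by maximum and the remaining deadline

    @ConstantTimeout(timeout=30.0)
    def other_call(timeout=None):
        return timeout

    assert other_call() == 30.0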
diff --git a/noxfile.py b/noxfile.py
index 77ef2ce..fbc51ee 100644
--- a/noxfile.py
+++ b/noxfile.py
@@ -28,85 +28,78 @@
     run the tests.
     """
     # Install all test dependencies, then install this package in-place.
-    session.install(
-        'mock',
-        'pytest',
-        'pytest-cov',
-        'grpcio >= 1.0.2',
-    )
-    session.install('-e', '.')
+    session.install("mock", "pytest", "pytest-cov", "grpcio >= 1.0.2")
+    session.install("-e", ".")
 
     # Run py.test against the unit tests.
     session.run(
-        'py.test',
-        '--quiet',
-        '--cov=google.api_core',
-        '--cov=tests.unit',
-        '--cov-append',
-        '--cov-config=.coveragerc',
-        '--cov-report=',
-        '--cov-fail-under=97',
-        os.path.join('tests', 'unit'),
+        "py.test",
+        "--quiet",
+        "--cov=google.api_core",
+        "--cov=tests.unit",
+        "--cov-append",
+        "--cov-config=.coveragerc",
+        "--cov-report=",
+        "--cov-fail-under=97",
+        os.path.join("tests", "unit"),
         *session.posargs
     )
 
 
-@nox.session(python=['2.7', '3.5', '3.6', '3.7'])
+@nox.session(python=["2.7", "3.5", "3.6", "3.7"])
 def unit(session):
     """Run the unit test suite."""
     default(session)
 
 
-@nox.session(python=['2.7', '3.5', '3.6', '3.7'])
+@nox.session(python=["2.7", "3.5", "3.6", "3.7"])
 def unit_grpc_gcp(session):
     """Run the unit test suite with grpcio-gcp installed."""
 
     # Install grpcio-gcp
-    session.install('grpcio-gcp')
+    session.install("grpcio-gcp")
 
     default(session)
 
 
-@nox.session(python='3.6')
+@nox.session(python="3.6")
 def lint(session):
     """Run linters.
 
     Returns a failure if the linters find linting errors or sufficiently
     serious code quality issues.
     """
-    session.install('flake8', 'flake8-import-order')
-    session.install('.')
-    session.run('flake8', 'google', 'tests')
+    session.install("flake8", "flake8-import-order")
+    session.install(".")
+    session.run("flake8", "google", "tests")
 
 
-@nox.session(python='3.6')
+@nox.session(python="3.6")
 def lint_setup_py(session):
     """Verify that setup.py is valid (including RST check)."""
 
-    session.install('docutils', 'Pygments')
-    session.run(
-        'python', 'setup.py', 'check', '--restructuredtext', '--strict')
+    session.install("docutils", "Pygments")
+    session.run("python", "setup.py", "check", "--restructuredtext", "--strict")
 
 
 # No 2.7 due to https://github.com/google/importlab/issues/26.
 # No 3.7 because pytype supports up to 3.6 only.
-@nox.session(python='3.6')
+@nox.session(python="3.6")
 def pytype(session):
-  """Run type-checking."""
-  session.install('.',
-                  'grpcio >= 1.8.2',
-                  'grpcio-gcp >= 0.2.2',
-                  'pytype >= 2018.9.26')
-  session.run('pytype')
+    """Run type-checking."""
+    session.install(
+        ".", "grpcio >= 1.8.2", "grpcio-gcp >= 0.2.2", "pytype >= 2018.9.26"
+    )
+    session.run("pytype")
 
 
-@nox.session(python='3.6')
+@nox.session(python="3.6")
 def cover(session):
     """Run the final coverage report.
 
     This outputs the coverage report aggregating coverage from the unit
     test runs (not system test runs), and then erases coverage data.
     """
-    session.install('coverage', 'pytest-cov')
-    session.run('coverage', 'report', '--show-missing', '--fail-under=100')
-    session.run('coverage', 'erase')
+    session.install("coverage", "pytest-cov")
+    session.run("coverage", "report", "--show-missing", "--fail-under=100")
+    session.run("coverage", "erase")
diff --git a/setup.py b/setup.py
index ff5a825..8ae217b 100644
--- a/setup.py
+++ b/setup.py
@@ -20,28 +20,28 @@
 
 # Package metadata.
 
-name = 'google-api-core'
-description = 'Google API client core library'
-version = '1.5.2'
+name = "google-api-core"
+description = "Google API client core library"
+version = "1.5.2"
 # Should be one of:
 # 'Development Status :: 3 - Alpha'
 # 'Development Status :: 4 - Beta'
 # 'Development Status :: 5 - Production/Stable'
-release_status = 'Development Status :: 5 - Production/Stable'
+release_status = "Development Status :: 5 - Production/Stable"
 dependencies = [
-    'googleapis-common-protos >= 1.5.3, != 1.5.4, < 2.0dev',
-    'protobuf >= 3.4.0',
-    'google-auth >= 0.4.0, < 2.0dev',
-    'requests >= 2.18.0, < 3.0.0dev',
-    'setuptools >= 34.0.0',
-    'six >= 1.10.0',
-    'pytz',
-    'futures >= 3.2.0; python_version < "3.2"'
+    "googleapis-common-protos >= 1.5.3, != 1.5.4, < 2.0dev",
+    "protobuf >= 3.4.0",
+    "google-auth >= 0.4.0, < 2.0dev",
+    "requests >= 2.18.0, < 3.0.0dev",
+    "setuptools >= 34.0.0",
+    "six >= 1.10.0",
+    "pytz",
+    'futures >= 3.2.0; python_version < "3.2"',
 ]
 extras = {
-    'grpc': 'grpcio >= 1.8.2',
-    'grpcgcp': 'grpcio-gcp >= 0.2.2',
-    'grpcio-gcp': 'grpcio-gcp >= 0.2.2'
+    "grpc": "grpcio >= 1.8.2",
+    "grpcgcp": "grpcio-gcp >= 0.2.2",
+    "grpcio-gcp": "grpcio-gcp >= 0.2.2",
 }
 
 
@@ -49,20 +49,20 @@
 
 package_root = os.path.abspath(os.path.dirname(__file__))
 
-readme_filename = os.path.join(package_root, 'README.rst')
-with io.open(readme_filename, encoding='utf-8') as readme_file:
+readme_filename = os.path.join(package_root, "README.rst")
+with io.open(readme_filename, encoding="utf-8") as readme_file:
     readme = readme_file.read()
 
 # Only include packages under the 'google' namespace. Do not include tests,
 # benchmarks, etc.
 packages = [
-    package for package in setuptools.find_packages()
-    if package.startswith('google')]
+    package for package in setuptools.find_packages() if package.startswith("google")
+]
 
 # Determine which namespaces are needed.
-namespaces = ['google']
-if 'google.cloud' in packages:
-    namespaces.append('google.cloud')
+namespaces = ["google"]
+if "google.cloud" in packages:
+    namespaces.append("google.cloud")
 
 
 setuptools.setup(
@@ -70,25 +70,25 @@
     version=version,
     description=description,
     long_description=readme,
-    author='Google LLC',
-    author_email='googleapis-packages@google.com',
-    license='Apache 2.0',
-    url='https://github.com/GoogleCloudPlatform/google-cloud-python',
+    author="Google LLC",
+    author_email="googleapis-packages@google.com",
+    license="Apache 2.0",
+    url="https://github.com/GoogleCloudPlatform/google-cloud-python",
     classifiers=[
         release_status,
-        'Intended Audience :: Developers',
-        'License :: OSI Approved :: Apache Software License',
-        'Programming Language :: Python',
-        'Programming Language :: Python :: 2',
-        'Programming Language :: Python :: 2.7',
-        'Programming Language :: Python :: 3',
-        'Programming Language :: Python :: 3.5',
-        'Programming Language :: Python :: 3.6',
-        'Programming Language :: Python :: 3.7',
-        'Operating System :: OS Independent',
-        'Topic :: Internet',
+        "Intended Audience :: Developers",
+        "License :: OSI Approved :: Apache Software License",
+        "Programming Language :: Python",
+        "Programming Language :: Python :: 2",
+        "Programming Language :: Python :: 2.7",
+        "Programming Language :: Python :: 3",
+        "Programming Language :: Python :: 3.5",
+        "Programming Language :: Python :: 3.6",
+        "Programming Language :: Python :: 3.7",
+        "Operating System :: OS Independent",
+        "Topic :: Internet",
     ],
-    platforms='Posix; MacOS X; Windows',
+    platforms="Posix; MacOS X; Windows",
     packages=packages,
     namespace_packages=namespaces,
     install_requires=dependencies,
diff --git a/tests/unit/future/test__helpers.py b/tests/unit/future/test__helpers.py
index d5e7aea..98afc59 100644
--- a/tests/unit/future/test__helpers.py
+++ b/tests/unit/future/test__helpers.py
@@ -17,21 +17,21 @@
 from google.api_core.future import _helpers
 
 
-@mock.patch('threading.Thread', autospec=True)
+@mock.patch("threading.Thread", autospec=True)
 def test_start_daemon_thread(unused_thread):
     daemon_thread = _helpers.start_daemon_thread(target=mock.sentinel.target)
     assert daemon_thread.daemon is True
 
 
 def test_safe_invoke_callback():
-    callback = mock.Mock(spec=['__call__'], return_value=42)
-    result = _helpers.safe_invoke_callback(callback, 'a', b='c')
+    callback = mock.Mock(spec=["__call__"], return_value=42)
+    result = _helpers.safe_invoke_callback(callback, "a", b="c")
     assert result == 42
-    callback.assert_called_once_with('a', b='c')
+    callback.assert_called_once_with("a", b="c")
 
 
 def test_safe_invoke_callback_exception():
-    callback = mock.Mock(spec=['__call__'], side_effect=ValueError())
-    result = _helpers.safe_invoke_callback(callback, 'a', b='c')
+    callback = mock.Mock(spec=["__call__"], side_effect=ValueError())
+    result = _helpers.safe_invoke_callback(callback, "a", b="c")
     assert result is None
-    callback.assert_called_once_with('a', b='c')
+    callback.assert_called_once_with("a", b="c")
diff --git a/tests/unit/future/test_polling.py b/tests/unit/future/test_polling.py
index f56f0c5..3e19d07 100644
--- a/tests/unit/future/test_polling.py
+++ b/tests/unit/future/test_polling.py
@@ -58,7 +58,7 @@
 
 def test_set_exception():
     future = PollingFutureImpl()
-    exception = ValueError('meep')
+    exception = ValueError("meep")
 
     future.set_exception(exception)
 
@@ -127,18 +127,20 @@
     def done(self):
         if self._errors:
             error, self._errors = self._errors[0], self._errors[1:]
-            raise error('testing')
+            raise error("testing")
         self.poll_count += 1
         self.set_result(42)
         return True
 
 
 def test_result_transient_error():
-    future = PollingFutureImplTransient((
-        exceptions.TooManyRequests,
-        exceptions.InternalServerError,
-        exceptions.BadGateway,
-    ))
+    future = PollingFutureImplTransient(
+        (
+            exceptions.TooManyRequests,
+            exceptions.InternalServerError,
+            exceptions.BadGateway,
+        )
+    )
     result = future.result()
     assert result == 42
     assert future.poll_count == 1
diff --git a/tests/unit/gapic/test_client_info.py b/tests/unit/gapic/test_client_info.py
index f83c4d5..dbab267 100644
--- a/tests/unit/gapic/test_client_info.py
+++ b/tests/unit/gapic/test_client_info.py
@@ -28,42 +28,42 @@
 
 def test_constructor_options():
     info = client_info.ClientInfo(
-        python_version='1',
-        grpc_version='2',
-        api_core_version='3',
-        gapic_version='4',
-        client_library_version='5')
+        python_version="1",
+        grpc_version="2",
+        api_core_version="3",
+        gapic_version="4",
+        client_library_version="5",
+    )
 
-    assert info.python_version == '1'
-    assert info.grpc_version == '2'
-    assert info.api_core_version == '3'
-    assert info.gapic_version == '4'
-    assert info.client_library_version == '5'
+    assert info.python_version == "1"
+    assert info.grpc_version == "2"
+    assert info.api_core_version == "3"
+    assert info.gapic_version == "4"
+    assert info.client_library_version == "5"
 
 
 def test_to_user_agent_minimal():
     info = client_info.ClientInfo(
-        python_version='1',
-        api_core_version='2',
-        grpc_version=None
+        python_version="1", api_core_version="2", grpc_version=None
     )
 
     user_agent = info.to_user_agent()
 
-    assert user_agent == 'gl-python/1 gax/2'
+    assert user_agent == "gl-python/1 gax/2"
 
 
 def test_to_user_agent_full():
     info = client_info.ClientInfo(
-        python_version='1',
-        grpc_version='2',
-        api_core_version='3',
-        gapic_version='4',
-        client_library_version='5')
+        python_version="1",
+        grpc_version="2",
+        api_core_version="3",
+        gapic_version="4",
+        client_library_version="5",
+    )
 
     user_agent = info.to_user_agent()
 
-    assert user_agent == 'gl-python/1 grpc/2 gax/3 gapic/4 gccl/5'
+    assert user_agent == "gl-python/1 grpc/2 gax/3 gapic/4 gccl/5"
 
 
 def test_to_grpc_metadata():
diff --git a/tests/unit/gapic/test_config.py b/tests/unit/gapic/test_config.py
index d614561..1c15261 100644
--- a/tests/unit/gapic/test_config.py
+++ b/tests/unit/gapic/test_config.py
@@ -17,53 +17,51 @@
 
 
 INTERFACE_CONFIG = {
-    'retry_codes': {
-        'idempotent': ['DEADLINE_EXCEEDED', 'UNAVAILABLE'],
-        'other': ['FAILED_PRECONDITION'],
-        'non_idempotent': []
+    "retry_codes": {
+        "idempotent": ["DEADLINE_EXCEEDED", "UNAVAILABLE"],
+        "other": ["FAILED_PRECONDITION"],
+        "non_idempotent": [],
     },
-    'retry_params': {
-        'default': {
-            'initial_retry_delay_millis': 1000,
-            'retry_delay_multiplier': 2.5,
-            'max_retry_delay_millis': 120000,
-            'initial_rpc_timeout_millis': 120000,
-            'rpc_timeout_multiplier': 1.0,
-            'max_rpc_timeout_millis': 120000,
-            'total_timeout_millis': 600000
+    "retry_params": {
+        "default": {
+            "initial_retry_delay_millis": 1000,
+            "retry_delay_multiplier": 2.5,
+            "max_retry_delay_millis": 120000,
+            "initial_rpc_timeout_millis": 120000,
+            "rpc_timeout_multiplier": 1.0,
+            "max_rpc_timeout_millis": 120000,
+            "total_timeout_millis": 600000,
         },
-        'other': {
-            'initial_retry_delay_millis': 1000,
-            'retry_delay_multiplier': 1,
-            'max_retry_delay_millis': 1000,
-            'initial_rpc_timeout_millis': 1000,
-            'rpc_timeout_multiplier': 1,
-            'max_rpc_timeout_millis': 1000,
-            'total_timeout_millis': 1000
+        "other": {
+            "initial_retry_delay_millis": 1000,
+            "retry_delay_multiplier": 1,
+            "max_retry_delay_millis": 1000,
+            "initial_rpc_timeout_millis": 1000,
+            "rpc_timeout_multiplier": 1,
+            "max_rpc_timeout_millis": 1000,
+            "total_timeout_millis": 1000,
         },
     },
-    'methods': {
-        'AnnotateVideo': {
-            'timeout_millis': 60000,
-            'retry_codes_name': 'idempotent',
-            'retry_params_name': 'default'
+    "methods": {
+        "AnnotateVideo": {
+            "timeout_millis": 60000,
+            "retry_codes_name": "idempotent",
+            "retry_params_name": "default",
         },
-        'Other': {
-            'timeout_millis': 60000,
-            'retry_codes_name': 'other',
-            'retry_params_name': 'other'
+        "Other": {
+            "timeout_millis": 60000,
+            "retry_codes_name": "other",
+            "retry_params_name": "other",
         },
-        'Plain': {
-            'timeout_millis': 30000
-        }
-    }
+        "Plain": {"timeout_millis": 30000},
+    },
 }
 
 
 def test_create_method_configs():
     method_configs = config.parse_method_configs(INTERFACE_CONFIG)
 
-    retry, timeout = method_configs['AnnotateVideo']
+    retry, timeout = method_configs["AnnotateVideo"]
     assert retry._predicate(exceptions.DeadlineExceeded(None))
     assert retry._predicate(exceptions.ServiceUnavailable(None))
     assert retry._initial == 1.0
@@ -74,7 +72,7 @@
     assert timeout._multiplier == 1.0
     assert timeout._maximum == 120.0
 
-    retry, timeout = method_configs['Other']
+    retry, timeout = method_configs["Other"]
     assert retry._predicate(exceptions.FailedPrecondition(None))
     assert retry._initial == 1.0
     assert retry._multiplier == 1.0
@@ -84,6 +82,6 @@
     assert timeout._multiplier == 1.0
     assert timeout._maximum == 1.0
 
-    retry, timeout = method_configs['Plain']
+    retry, timeout = method_configs["Plain"]
     assert retry is None
     assert timeout._timeout == 30.0
diff --git a/tests/unit/gapic/test_method.py b/tests/unit/gapic/test_method.py
index 4ce4e8e..0f9bee9 100644
--- a/tests/unit/gapic/test_method.py
+++ b/tests/unit/gapic/test_method.py
@@ -33,17 +33,17 @@
 
 
 def test_wrap_method_basic():
-    method = mock.Mock(spec=['__call__'], return_value=42)
+    method = mock.Mock(spec=["__call__"], return_value=42)
 
     wrapped_method = google.api_core.gapic_v1.method.wrap_method(method)
 
-    result = wrapped_method(1, 2, meep='moop')
+    result = wrapped_method(1, 2, meep="moop")
 
     assert result == 42
-    method.assert_called_once_with(1, 2, meep='moop', metadata=mock.ANY)
+    method.assert_called_once_with(1, 2, meep="moop", metadata=mock.ANY)
 
     # Check that the default client info was specified in the metadata.
-    metadata = method.call_args[1]['metadata']
+    metadata = method.call_args[1]["metadata"]
     assert len(metadata) == 1
     client_info = google.api_core.gapic_v1.client_info.DEFAULT_CLIENT_INFO
     user_agent_metadata = client_info.to_grpc_metadata()
@@ -51,72 +51,78 @@
 
 
 def test_wrap_method_with_no_client_info():
-    method = mock.Mock(spec=['__call__'])
+    method = mock.Mock(spec=["__call__"])
 
     wrapped_method = google.api_core.gapic_v1.method.wrap_method(
-        method, client_info=None)
+        method, client_info=None
+    )
 
-    wrapped_method(1, 2, meep='moop')
+    wrapped_method(1, 2, meep="moop")
 
-    method.assert_called_once_with(1, 2, meep='moop')
+    method.assert_called_once_with(1, 2, meep="moop")
 
 
 def test_wrap_method_with_custom_client_info():
     client_info = google.api_core.gapic_v1.client_info.ClientInfo(
-        python_version=1, grpc_version=2, api_core_version=3, gapic_version=4,
-        client_library_version=5)
-    method = mock.Mock(spec=['__call__'])
+        python_version=1,
+        grpc_version=2,
+        api_core_version=3,
+        gapic_version=4,
+        client_library_version=5,
+    )
+    method = mock.Mock(spec=["__call__"])
 
     wrapped_method = google.api_core.gapic_v1.method.wrap_method(
-        method, client_info=client_info)
+        method, client_info=client_info
+    )
 
-    wrapped_method(1, 2, meep='moop')
+    wrapped_method(1, 2, meep="moop")
 
-    method.assert_called_once_with(1, 2, meep='moop', metadata=mock.ANY)
+    method.assert_called_once_with(1, 2, meep="moop", metadata=mock.ANY)
 
     # Check that the custom client info was specified in the metadata.
-    metadata = method.call_args[1]['metadata']
+    metadata = method.call_args[1]["metadata"]
     assert client_info.to_grpc_metadata() in metadata
 
 
 def test_invoke_wrapped_method_with_metadata():
-    method = mock.Mock(spec=['__call__'])
+    method = mock.Mock(spec=["__call__"])
 
     wrapped_method = google.api_core.gapic_v1.method.wrap_method(method)
 
-    wrapped_method(mock.sentinel.request, metadata=[('a', 'b')])
+    wrapped_method(mock.sentinel.request, metadata=[("a", "b")])
 
     method.assert_called_once_with(mock.sentinel.request, metadata=mock.ANY)
-    metadata = method.call_args[1]['metadata']
+    metadata = method.call_args[1]["metadata"]
     # Metadata should have two items: the client info metadata and our custom
     # metadata.
     assert len(metadata) == 2
-    assert ('a', 'b') in metadata
+    assert ("a", "b") in metadata
 
 
 def test_invoke_wrapped_method_with_metadata_as_none():
-    method = mock.Mock(spec=['__call__'])
+    method = mock.Mock(spec=["__call__"])
 
     wrapped_method = google.api_core.gapic_v1.method.wrap_method(method)
 
     wrapped_method(mock.sentinel.request, metadata=None)
 
     method.assert_called_once_with(mock.sentinel.request, metadata=mock.ANY)
-    metadata = method.call_args[1]['metadata']
+    metadata = method.call_args[1]["metadata"]
     # Metadata should have just one item: the client info metadata.
     assert len(metadata) == 1
 
 
-@mock.patch('time.sleep')
+@mock.patch("time.sleep")
 def test_wrap_method_with_default_retry_and_timeout(unused_sleep):
     method = mock.Mock(
-        spec=['__call__'],
-        side_effect=[exceptions.InternalServerError(None), 42]
+        spec=["__call__"], side_effect=[exceptions.InternalServerError(None), 42]
     )
     default_retry = retry.Retry()
     default_timeout = timeout.ConstantTimeout(60)
     wrapped_method = google.api_core.gapic_v1.method.wrap_method(
-        method, default_retry, default_timeout)
+        method, default_retry, default_timeout
+    )
 
     result = wrapped_method()
 
@@ -125,83 +131,84 @@
     method.assert_called_with(timeout=60, metadata=mock.ANY)
 
 
-@mock.patch('time.sleep')
-def test_wrap_method_with_default_retry_and_timeout_using_sentinel(
-        unusued_sleep):
+@mock.patch("time.sleep")
+def test_wrap_method_with_default_retry_and_timeout_using_sentinel(unused_sleep):
     method = mock.Mock(
-        spec=['__call__'],
-        side_effect=[exceptions.InternalServerError(None), 42]
+        spec=["__call__"], side_effect=[exceptions.InternalServerError(None), 42]
     )
     default_retry = retry.Retry()
     default_timeout = timeout.ConstantTimeout(60)
     wrapped_method = google.api_core.gapic_v1.method.wrap_method(
-        method, default_retry, default_timeout)
+        method, default_retry, default_timeout
+    )
 
     result = wrapped_method(
         retry=google.api_core.gapic_v1.method.DEFAULT,
-        timeout=google.api_core.gapic_v1.method.DEFAULT)
+        timeout=google.api_core.gapic_v1.method.DEFAULT,
+    )
 
     assert result == 42
     assert method.call_count == 2
     method.assert_called_with(timeout=60, metadata=mock.ANY)
 
 
-@mock.patch('time.sleep')
+@mock.patch("time.sleep")
 def test_wrap_method_with_overriding_retry_and_timeout(unused_sleep):
-    method = mock.Mock(
-        spec=['__call__'],
-        side_effect=[exceptions.NotFound(None), 42]
-    )
+    method = mock.Mock(spec=["__call__"], side_effect=[exceptions.NotFound(None), 42])
     default_retry = retry.Retry()
     default_timeout = timeout.ConstantTimeout(60)
     wrapped_method = google.api_core.gapic_v1.method.wrap_method(
-        method, default_retry, default_timeout)
+        method, default_retry, default_timeout
+    )
 
     result = wrapped_method(
         retry=retry.Retry(retry.if_exception_type(exceptions.NotFound)),
-        timeout=timeout.ConstantTimeout(22))
+        timeout=timeout.ConstantTimeout(22),
+    )
 
     assert result == 42
     assert method.call_count == 2
     method.assert_called_with(timeout=22, metadata=mock.ANY)
 
 
-@mock.patch('time.sleep')
+@mock.patch("time.sleep")
 @mock.patch(
-    'google.api_core.datetime_helpers.utcnow',
+    "google.api_core.datetime_helpers.utcnow",
     side_effect=_utcnow_monotonic(),
-    autospec=True)
+    autospec=True,
+)
 def test_wrap_method_with_overriding_retry_deadline(utcnow, unused_sleep):
     method = mock.Mock(
-        spec=['__call__'],
-        side_effect=([exceptions.InternalServerError(None)] * 4) + [42]
+        spec=["__call__"],
+        side_effect=([exceptions.InternalServerError(None)] * 4) + [42],
     )
     default_retry = retry.Retry()
     default_timeout = timeout.ExponentialTimeout(deadline=60)
     wrapped_method = google.api_core.gapic_v1.method.wrap_method(
-        method, default_retry, default_timeout)
+        method, default_retry, default_timeout
+    )
 
     # Overriding only the retry's deadline should also override the timeout's
     # deadline.
-    result = wrapped_method(
-        retry=default_retry.with_deadline(30))
+    result = wrapped_method(retry=default_retry.with_deadline(30))
 
     assert result == 42
-    timeout_args = [call[1]['timeout'] for call in method.call_args_list]
+    timeout_args = [call[1]["timeout"] for call in method.call_args_list]
     assert timeout_args == [5.0, 10.0, 20.0, 26.0, 25.0]
     assert utcnow.call_count == (
-        1 +  # First to set the deadline.
-        5 +  # One for each min(timeout, maximum, (DEADLINE - NOW).seconds)
-        5
+        1  # First to set the deadline.
+        + 5  # One for each min(timeout, maximum, (DEADLINE - NOW).seconds)
+        + 5
     )
 
 
 def test_wrap_method_with_overriding_timeout_as_a_number():
-    method = mock.Mock(spec=['__call__'], return_value=42)
+    method = mock.Mock(spec=["__call__"], return_value=42)
     default_retry = retry.Retry()
     default_timeout = timeout.ConstantTimeout(60)
     wrapped_method = google.api_core.gapic_v1.method.wrap_method(
-        method, default_retry, default_timeout)
+        method, default_retry, default_timeout
+    )
 
     result = wrapped_method(timeout=22)
 
diff --git a/tests/unit/gapic/test_routing_header.py b/tests/unit/gapic/test_routing_header.py
index 6bedf29..77300e8 100644
--- a/tests/unit/gapic/test_routing_header.py
+++ b/tests/unit/gapic/test_routing_header.py
@@ -17,19 +17,18 @@
 
 
 def test_to_routing_header():
-    params = [('name', 'meep'), ('book.read', '1')]
+    params = [("name", "meep"), ("book.read", "1")]
     value = routing_header.to_routing_header(params)
     assert value == "name=meep&book.read=1"
 
 
 def test_to_routing_header_with_slashes():
-    params = [('name', 'me/ep'), ('book.read', '1&2')]
+    params = [("name", "me/ep"), ("book.read", "1&2")]
     value = routing_header.to_routing_header(params)
     assert value == "name=me/ep&book.read=1%262"
 
 
 def test_to_grpc_metadata():
-    params = [('name', 'meep'), ('book.read', '1')]
+    params = [("name", "meep"), ("book.read", "1")]
     metadata = routing_header.to_grpc_metadata(params)
-    assert metadata == (
-        routing_header.ROUTING_METADATA_KEY, "name=meep&book.read=1")
+    assert metadata == (routing_header.ROUTING_METADATA_KEY, "name=meep&book.read=1")
diff --git a/tests/unit/operations_v1/test_operations_client.py b/tests/unit/operations_v1/test_operations_client.py
index 69d4dfc..cc57461 100644
--- a/tests/unit/operations_v1/test_operations_client.py
+++ b/tests/unit/operations_v1/test_operations_client.py
@@ -22,12 +22,12 @@
 def test_get_operation():
     channel = grpc_helpers.ChannelStub()
     client = operations_v1.OperationsClient(channel)
-    channel.GetOperation.response = operations_pb2.Operation(name='meep')
+    channel.GetOperation.response = operations_pb2.Operation(name="meep")
 
-    response = client.get_operation('name')
+    response = client.get_operation("name")
 
     assert len(channel.GetOperation.requests) == 1
-    assert channel.GetOperation.requests[0].name == 'name'
+    assert channel.GetOperation.requests[0].name == "name"
     assert response == channel.GetOperation.response
 
 
@@ -35,13 +35,13 @@
     channel = grpc_helpers.ChannelStub()
     client = operations_v1.OperationsClient(channel)
     operations = [
-        operations_pb2.Operation(name='1'),
-        operations_pb2.Operation(name='2')]
-    list_response = operations_pb2.ListOperationsResponse(
-        operations=operations)
+        operations_pb2.Operation(name="1"),
+        operations_pb2.Operation(name="2"),
+    ]
+    list_response = operations_pb2.ListOperationsResponse(operations=operations)
     channel.ListOperations.response = list_response
 
-    response = client.list_operations('name', 'filter')
+    response = client.list_operations("name", "filter")
 
     assert isinstance(response, page_iterator.Iterator)
     assert list(response) == operations
@@ -49,8 +49,8 @@
     assert len(channel.ListOperations.requests) == 1
     request = channel.ListOperations.requests[0]
     assert isinstance(request, operations_pb2.ListOperationsRequest)
-    assert request.name == 'name'
-    assert request.filter == 'filter'
+    assert request.name == "name"
+    assert request.filter == "filter"
 
 
 def test_delete_operation():
@@ -58,10 +58,10 @@
     client = operations_v1.OperationsClient(channel)
     channel.DeleteOperation.response = empty_pb2.Empty()
 
-    client.delete_operation('name')
+    client.delete_operation("name")
 
     assert len(channel.DeleteOperation.requests) == 1
-    assert channel.DeleteOperation.requests[0].name == 'name'
+    assert channel.DeleteOperation.requests[0].name == "name"
 
 
 def test_cancel_operation():
@@ -69,7 +69,7 @@
     client = operations_v1.OperationsClient(channel)
     channel.CancelOperation.response = empty_pb2.Empty()
 
-    client.cancel_operation('name')
+    client.cancel_operation("name")
 
     assert len(channel.CancelOperation.requests) == 1
-    assert channel.CancelOperation.requests[0].name == 'name'
+    assert channel.CancelOperation.requests[0].name == "name"
diff --git a/tests/unit/test_bidi.py b/tests/unit/test_bidi.py
index 9feada3..8163699 100644
--- a/tests/unit/test_bidi.py
+++ b/tests/unit/test_bidi.py
@@ -25,7 +25,6 @@
 
 
 class Test_RequestQueueGenerator(object):
-
     def test_bounded_consume(self):
         call = mock.create_autospec(grpc.Call, instance=True)
         call.is_active.return_value = True
@@ -53,8 +52,7 @@
         call = mock.create_autospec(grpc.Call, instance=True)
         call.is_active.return_value = False
 
-        generator = bidi._RequestQueueGenerator(
-            q, initial_request=mock.sentinel.A)
+        generator = bidi._RequestQueueGenerator(q, initial_request=mock.sentinel.A)
         generator.call = call
 
         items = list(generator)
@@ -68,7 +66,8 @@
         call.is_active.return_value = False
 
         generator = bidi._RequestQueueGenerator(
-            q, initial_request=lambda: mock.sentinel.A)
+            q, initial_request=lambda: mock.sentinel.A
+        )
         generator.call = call
 
         items = list(generator)
@@ -166,7 +165,7 @@
 
     def test_done_callbacks(self):
         bidi_rpc = bidi.BidiRpc(None)
-        callback = mock.Mock(spec=['__call__'])
+        callback = mock.Mock(spec=["__call__"])
 
         bidi_rpc.add_done_callback(callback)
         bidi_rpc._on_call_done(mock.sentinel.future)
@@ -287,10 +286,9 @@
         assert bidi_rpc.is_active is False
 
     def test_done_callbacks_recoverable(self):
-        start_rpc = mock.create_autospec(
-            grpc.StreamStreamMultiCallable, instance=True)
+        start_rpc = mock.create_autospec(grpc.StreamStreamMultiCallable, instance=True)
         bidi_rpc = bidi.ResumableBidiRpc(start_rpc, lambda _: True)
-        callback = mock.Mock(spec=['__call__'])
+        callback = mock.Mock(spec=["__call__"])
 
         bidi_rpc.add_done_callback(callback)
         bidi_rpc._on_call_done(mock.sentinel.future)
@@ -301,7 +299,7 @@
 
     def test_done_callbacks_non_recoverable(self):
         bidi_rpc = bidi.ResumableBidiRpc(None, lambda _: False)
-        callback = mock.Mock(spec=['__call__'])
+        callback = mock.Mock(spec=["__call__"])
 
         bidi_rpc.add_done_callback(callback)
         bidi_rpc._on_call_done(mock.sentinel.future)
@@ -313,10 +311,9 @@
         call_1 = CallStub([error], active=False)
         call_2 = CallStub([])
         start_rpc = mock.create_autospec(
-            grpc.StreamStreamMultiCallable,
-            instance=True,
-            side_effect=[call_1, call_2])
-        should_recover = mock.Mock(spec=['__call__'], return_value=True)
+            grpc.StreamStreamMultiCallable, instance=True, side_effect=[call_1, call_2]
+        )
+        should_recover = mock.Mock(spec=["__call__"], return_value=True)
         bidi_rpc = bidi.ResumableBidiRpc(start_rpc, should_recover)
 
         bidi_rpc.open()
@@ -334,10 +331,9 @@
         error = ValueError()
         call = CallStub([error], active=False)
         start_rpc = mock.create_autospec(
-            grpc.StreamStreamMultiCallable,
-            instance=True,
-            return_value=call)
-        should_recover = mock.Mock(spec=['__call__'], return_value=False)
+            grpc.StreamStreamMultiCallable, instance=True, return_value=call
+        )
+        should_recover = mock.Mock(spec=["__call__"], return_value=False)
         bidi_rpc = bidi.ResumableBidiRpc(start_rpc, should_recover)
 
         bidi_rpc.open()
@@ -358,10 +354,9 @@
         call_1 = CallStub([1, error])
         call_2 = CallStub([2, 3])
         start_rpc = mock.create_autospec(
-            grpc.StreamStreamMultiCallable,
-            instance=True,
-            side_effect=[call_1, call_2])
-        should_recover = mock.Mock(spec=['__call__'], return_value=True)
+            grpc.StreamStreamMultiCallable, instance=True, side_effect=[call_1, call_2]
+        )
+        should_recover = mock.Mock(spec=["__call__"], return_value=True)
         bidi_rpc = bidi.ResumableBidiRpc(start_rpc, should_recover)
 
         bidi_rpc.open()
@@ -379,9 +374,8 @@
         call_1 = CallStub([])
         call_2 = CallStub([])
         start_rpc = mock.create_autospec(
-            grpc.StreamStreamMultiCallable,
-            instance=True,
-            side_effect=[call_1, call_2])
+            grpc.StreamStreamMultiCallable, instance=True, side_effect=[call_1, call_2]
+        )
         callback = mock.Mock()
         callback.return_value = True
         bidi_rpc = bidi.ResumableBidiRpc(start_rpc, callback)
@@ -397,10 +391,9 @@
         error = ValueError()
         call = CallStub([error])
         start_rpc = mock.create_autospec(
-            grpc.StreamStreamMultiCallable,
-            instance=True,
-            return_value=call)
-        should_recover = mock.Mock(spec=['__call__'], return_value=False)
+            grpc.StreamStreamMultiCallable, instance=True, return_value=call
+        )
+        should_recover = mock.Mock(spec=["__call__"], return_value=False)
         bidi_rpc = bidi.ResumableBidiRpc(start_rpc, should_recover)
 
         bidi_rpc.open()
@@ -415,16 +408,15 @@
         assert call.cancelled is True
 
     def test_reopen_failure_on_rpc_restart(self):
-        error1 = ValueError('1')
-        error2 = ValueError('2')
+        error1 = ValueError("1")
+        error2 = ValueError("2")
         call = CallStub([error1])
         # Invoking start RPC a second time will trigger an error.
         start_rpc = mock.create_autospec(
-            grpc.StreamStreamMultiCallable,
-            instance=True,
-            side_effect=[call, error2])
-        should_recover = mock.Mock(spec=['__call__'], return_value=True)
-        callback = mock.Mock(spec=['__call__'])
+            grpc.StreamStreamMultiCallable, instance=True, side_effect=[call, error2]
+        )
+        should_recover = mock.Mock(spec=["__call__"], return_value=True)
+        callback = mock.Mock(spec=["__call__"])
 
         bidi_rpc = bidi.ResumableBidiRpc(start_rpc, should_recover)
         bidi_rpc.add_done_callback(callback)
@@ -453,13 +445,12 @@
             bidi_rpc.recv()
 
     def test_finalize_idempotent(self):
-        error1 = ValueError('1')
-        error2 = ValueError('2')
-        callback = mock.Mock(spec=['__call__'])
-        should_recover = mock.Mock(spec=['__call__'], return_value=False)
+        error1 = ValueError("1")
+        error2 = ValueError("2")
+        callback = mock.Mock(spec=["__call__"])
+        should_recover = mock.Mock(spec=["__call__"], return_value=False)
 
-        bidi_rpc = bidi.ResumableBidiRpc(
-            mock.sentinel.start_rpc, should_recover)
+        bidi_rpc = bidi.ResumableBidiRpc(mock.sentinel.start_rpc, should_recover)
 
         bidi_rpc.add_done_callback(callback)
 
@@ -514,10 +505,9 @@
         should_continue = threading.Event()
         responses_and_events = {
             mock.sentinel.response_1: threading.Event(),
-            mock.sentinel.response_2: threading.Event()
+            mock.sentinel.response_2: threading.Event(),
         }
-        bidi_rpc.recv.side_effect = [
-            mock.sentinel.response_1, mock.sentinel.response_2]
+        bidi_rpc.recv.side_effect = [mock.sentinel.response_1, mock.sentinel.response_2]
 
         recved_responses = []
         consumer = None
@@ -549,8 +539,7 @@
 
         responses_and_events[mock.sentinel.response_2].wait()
 
-        assert recved_responses == [
-            mock.sentinel.response_1, mock.sentinel.response_2]
+        assert recved_responses == [mock.sentinel.response_1, mock.sentinel.response_2]
 
         consumer.stop()
 
@@ -561,8 +550,7 @@
 
         bidi_rpc = mock.create_autospec(bidi.BidiRpc, instance=True)
         bidi_rpc.is_active = True
-        bidi_rpc.add_done_callback.side_effect = (
-            lambda _: should_continue.set())
+        bidi_rpc.add_done_callback.side_effect = lambda _: should_continue.set()
 
         consumer = bidi.BackgroundConsumer(bidi_rpc, mock.sentinel.on_response)
 
@@ -573,8 +561,7 @@
 
         # Wait for add_done_callback to be called
         should_continue.wait()
-        bidi_rpc.add_done_callback.assert_called_once_with(
-            consumer._on_call_done)
+        bidi_rpc.add_done_callback.assert_called_once_with(consumer._on_call_done)
 
         # The consumer should now be blocked on waiting to be unpaused.
         assert consumer.is_active
@@ -594,9 +581,9 @@
 
         bidi_rpc = mock.create_autospec(bidi.BidiRpc, instance=True)
         bidi_rpc.is_active = True
-        bidi_rpc.recv.side_effect = exceptions.ServiceUnavailable('Gone away')
+        bidi_rpc.recv.side_effect = exceptions.ServiceUnavailable("Gone away")
 
-        on_response = mock.Mock(spec=['__call__'])
+        on_response = mock.Mock(spec=["__call__"])
 
         consumer = bidi.BackgroundConsumer(bidi_rpc, on_response)
 
@@ -608,7 +595,7 @@
 
         on_response.assert_not_called()
         bidi_rpc.recv.assert_called_once()
-        assert 'caught error' in caplog.text
+        assert "caught error" in caplog.text
 
     def test_consumer_unexpected_error(self, caplog):
         caplog.set_level(logging.DEBUG)
@@ -617,7 +604,7 @@
         bidi_rpc.is_active = True
         bidi_rpc.recv.side_effect = ValueError()
 
-        on_response = mock.Mock(spec=['__call__'])
+        on_response = mock.Mock(spec=["__call__"])
 
         consumer = bidi.BackgroundConsumer(bidi_rpc, on_response)
 
@@ -629,13 +616,13 @@
 
         on_response.assert_not_called()
         bidi_rpc.recv.assert_called_once()
-        assert 'caught unexpected exception' in caplog.text
+        assert "caught unexpected exception" in caplog.text
 
     def test_double_stop(self, caplog):
         caplog.set_level(logging.DEBUG)
         bidi_rpc = mock.create_autospec(bidi.BidiRpc, instance=True)
         bidi_rpc.is_active = True
-        on_response = mock.Mock(spec=['__call__'])
+        on_response = mock.Mock(spec=["__call__"])
 
         def close_side_effect():
             bidi_rpc.is_active = False
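
The test_bidi.py hunks above show how Black chooses a layout for a call: if it fits in 88 characters it is joined onto one line; if not, the arguments move to a single indented continuation line with the closing parenthesis on its own line; only when that still overflows does each argument or entry get its own line, with a trailing comma after the last one. A minimal sketch with made-up names (nothing below comes from the library):

    # Hedged sketch with made-up names, showing the three call layouts that
    # appear in the hunks above.

    def resume(stream, initial_request=None, should_recover=None, metadata=None):
        # Placeholder body for illustration only.
        return (stream, initial_request, should_recover, metadata)

    # 1. Fits within 88 characters: joined onto a single line.
    short = resume("stream", initial_request=None)

    # 2. Slightly too long: the arguments share one indented continuation line
    #    and the closing parenthesis gets its own line (no trailing comma).
    medium = resume(
        "projects/example/streams/events", initial_request={"resume_token": b""}
    )

    # 3. Still too long for one continuation line: one argument per line, with
    #    a trailing comma after the last one.
    full = resume(
        "projects/example/streams/events",
        initial_request={"resume_token": b"last-seen"},
        should_recover=lambda exc: isinstance(exc, ValueError),
        metadata=[("x-goog-request-params", "stream=events")],
    )
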
diff --git a/tests/unit/test_datetime_helpers.py b/tests/unit/test_datetime_helpers.py
index 03b9477..2f99235 100644
--- a/tests/unit/test_datetime_helpers.py
+++ b/tests/unit/test_datetime_helpers.py
@@ -35,8 +35,7 @@
 
 def test_to_microseconds():
     microseconds = 314159
-    dt = datetime.datetime(
-        1970, 1, 1, 0, 0, 0, microsecond=microseconds)
+    dt = datetime.datetime(1970, 1, 1, 0, 0, 0, microsecond=microseconds)
     assert datetime_helpers.to_microseconds(dt) == microseconds
 
 
@@ -48,112 +47,115 @@
 
 def test_to_microseconds_naive():
     microseconds = 314159
-    dt = datetime.datetime(
-        1970, 1, 1, 0, 0, 0, microsecond=microseconds, tzinfo=None)
+    dt = datetime.datetime(1970, 1, 1, 0, 0, 0, microsecond=microseconds, tzinfo=None)
     assert datetime_helpers.to_microseconds(dt) == microseconds
 
 
 def test_from_microseconds():
     five_mins_from_epoch_in_microseconds = 5 * ONE_MINUTE_IN_MICROSECONDS
     five_mins_from_epoch_datetime = datetime.datetime(
-        1970, 1, 1, 0, 5, 0, tzinfo=pytz.utc)
+        1970, 1, 1, 0, 5, 0, tzinfo=pytz.utc
+    )
 
-    result = datetime_helpers.from_microseconds(
-        five_mins_from_epoch_in_microseconds)
+    result = datetime_helpers.from_microseconds(five_mins_from_epoch_in_microseconds)
 
     assert result == five_mins_from_epoch_datetime
 
 
 def test_from_iso8601_date():
     today = datetime.date.today()
-    iso_8601_today = today.strftime('%Y-%m-%d')
+    iso_8601_today = today.strftime("%Y-%m-%d")
 
     assert datetime_helpers.from_iso8601_date(iso_8601_today) == today
 
 
 def test_from_iso8601_time():
-    assert (
-        datetime_helpers.from_iso8601_time('12:09:42') ==
-        datetime.time(12, 9, 42))
+    assert datetime_helpers.from_iso8601_time("12:09:42") == datetime.time(12, 9, 42)
 
 
 def test_from_rfc3339():
-    value = '2009-12-17T12:44:32.123456Z'
+    value = "2009-12-17T12:44:32.123456Z"
     assert datetime_helpers.from_rfc3339(value) == datetime.datetime(
-        2009, 12, 17, 12, 44, 32, 123456, pytz.utc)
+        2009, 12, 17, 12, 44, 32, 123456, pytz.utc
+    )
 
 
 def test_from_rfc3339_with_bad_tz():
-    value = '2009-12-17T12:44:32.123456BAD'
+    value = "2009-12-17T12:44:32.123456BAD"
 
     with pytest.raises(ValueError):
         datetime_helpers.from_rfc3339(value)
 
 
 def test_from_rfc3339_with_nanos():
-    value = '2009-12-17T12:44:32.123456789Z'
+    value = "2009-12-17T12:44:32.123456789Z"
 
     with pytest.raises(ValueError):
         datetime_helpers.from_rfc3339(value)
 
 
 def test_from_rfc3339_nanos_without_nanos():
-    value = '2009-12-17T12:44:32Z'
+    value = "2009-12-17T12:44:32Z"
     assert datetime_helpers.from_rfc3339_nanos(value) == datetime.datetime(
-        2009, 12, 17, 12, 44, 32, 0, pytz.utc)
+        2009, 12, 17, 12, 44, 32, 0, pytz.utc
+    )
 
 
 def test_from_rfc3339_nanos_with_bad_tz():
-    value = '2009-12-17T12:44:32.123456789BAD'
+    value = "2009-12-17T12:44:32.123456789BAD"
 
     with pytest.raises(ValueError):
         datetime_helpers.from_rfc3339_nanos(value)
 
 
-@pytest.mark.parametrize('truncated, micros', [
-    ('12345678', 123456),
-    ('1234567', 123456),
-    ('123456', 123456),
-    ('12345', 123450),
-    ('1234', 123400),
-    ('123', 123000),
-    ('12', 120000),
-    ('1', 100000)])
+@pytest.mark.parametrize(
+    "truncated, micros",
+    [
+        ("12345678", 123456),
+        ("1234567", 123456),
+        ("123456", 123456),
+        ("12345", 123450),
+        ("1234", 123400),
+        ("123", 123000),
+        ("12", 120000),
+        ("1", 100000),
+    ],
+)
 def test_from_rfc3339_nanos_with_truncated_nanos(truncated, micros):
-    value = '2009-12-17T12:44:32.{}Z'.format(truncated)
+    value = "2009-12-17T12:44:32.{}Z".format(truncated)
     assert datetime_helpers.from_rfc3339_nanos(value) == datetime.datetime(
-        2009, 12, 17, 12, 44, 32, micros, pytz.utc)
+        2009, 12, 17, 12, 44, 32, micros, pytz.utc
+    )
 
 
 def test_to_rfc3339():
     value = datetime.datetime(2016, 4, 5, 13, 30, 0)
-    expected = '2016-04-05T13:30:00.000000Z'
+    expected = "2016-04-05T13:30:00.000000Z"
     assert datetime_helpers.to_rfc3339(value) == expected
 
 
 def test_to_rfc3339_with_utc():
     value = datetime.datetime(2016, 4, 5, 13, 30, 0, tzinfo=pytz.utc)
-    expected = '2016-04-05T13:30:00.000000Z'
+    expected = "2016-04-05T13:30:00.000000Z"
     assert datetime_helpers.to_rfc3339(value, ignore_zone=False) == expected
 
 
 def test_to_rfc3339_with_non_utc():
     zone = pytz.FixedOffset(-60)
     value = datetime.datetime(2016, 4, 5, 13, 30, 0, tzinfo=zone)
-    expected = '2016-04-05T14:30:00.000000Z'
+    expected = "2016-04-05T14:30:00.000000Z"
     assert datetime_helpers.to_rfc3339(value, ignore_zone=False) == expected
 
 
 def test_to_rfc3339_with_non_utc_ignore_zone():
     zone = pytz.FixedOffset(-60)
     value = datetime.datetime(2016, 4, 5, 13, 30, 0, tzinfo=zone)
-    expected = '2016-04-05T13:30:00.000000Z'
+    expected = "2016-04-05T13:30:00.000000Z"
     assert datetime_helpers.to_rfc3339(value, ignore_zone=True) == expected
 
 
 def test_datetimewithnanos_ctor_wo_nanos():
-    stamp = datetime_helpers.DatetimeWithNanoseconds(
-        2016, 12, 20, 21, 13, 47, 123456)
+    stamp = datetime_helpers.DatetimeWithNanoseconds(2016, 12, 20, 21, 13, 47, 123456)
     assert stamp.year == 2016
     assert stamp.month == 12
     assert stamp.day == 20
@@ -166,7 +168,8 @@
 
 def test_datetimewithnanos_ctor_w_nanos():
     stamp = datetime_helpers.DatetimeWithNanoseconds(
-        2016, 12, 20, 21, 13, 47, nanosecond=123456789)
+        2016, 12, 20, 21, 13, 47, nanosecond=123456789
+    )
     assert stamp.year == 2016
     assert stamp.month == 12
     assert stamp.day == 20
@@ -180,64 +183,64 @@
 def test_datetimewithnanos_ctor_w_micros_positional_and_nanos():
     with pytest.raises(TypeError):
         datetime_helpers.DatetimeWithNanoseconds(
-            2016, 12, 20, 21, 13, 47, 123456, nanosecond=123456789)
+            2016, 12, 20, 21, 13, 47, 123456, nanosecond=123456789
+        )
 
 
 def test_datetimewithnanos_ctor_w_micros_keyword_and_nanos():
     with pytest.raises(TypeError):
         datetime_helpers.DatetimeWithNanoseconds(
-            2016, 12, 20, 21, 13, 47,
-            microsecond=123456, nanosecond=123456789)
+            2016, 12, 20, 21, 13, 47, microsecond=123456, nanosecond=123456789
+        )
 
 
 def test_datetimewithnanos_rfc339_wo_nanos():
-    stamp = datetime_helpers.DatetimeWithNanoseconds(
-        2016, 12, 20, 21, 13, 47, 123456)
-    assert stamp.rfc3339() == '2016-12-20T21:13:47.123456Z'
+    stamp = datetime_helpers.DatetimeWithNanoseconds(2016, 12, 20, 21, 13, 47, 123456)
+    assert stamp.rfc3339() == "2016-12-20T21:13:47.123456Z"
 
 
 def test_datetimewithnanos_rfc339_w_nanos():
     stamp = datetime_helpers.DatetimeWithNanoseconds(
-        2016, 12, 20, 21, 13, 47, nanosecond=123456789)
-    assert stamp.rfc3339() == '2016-12-20T21:13:47.123456789Z'
+        2016, 12, 20, 21, 13, 47, nanosecond=123456789
+    )
+    assert stamp.rfc3339() == "2016-12-20T21:13:47.123456789Z"
 
 
 def test_datetimewithnanos_rfc339_w_nanos_no_trailing_zeroes():
     stamp = datetime_helpers.DatetimeWithNanoseconds(
-        2016, 12, 20, 21, 13, 47, nanosecond=100000000)
-    assert stamp.rfc3339() == '2016-12-20T21:13:47.1Z'
+        2016, 12, 20, 21, 13, 47, nanosecond=100000000
+    )
+    assert stamp.rfc3339() == "2016-12-20T21:13:47.1Z"
 
 
 def test_datetimewithnanos_from_rfc3339_w_invalid():
-    stamp = '2016-12-20T21:13:47'
+    stamp = "2016-12-20T21:13:47"
     with pytest.raises(ValueError):
         datetime_helpers.DatetimeWithNanoseconds.from_rfc3339(stamp)
 
 
 def test_datetimewithnanos_from_rfc3339_wo_fraction():
-    timestamp = '2016-12-20T21:13:47Z'
+    timestamp = "2016-12-20T21:13:47Z"
     expected = datetime_helpers.DatetimeWithNanoseconds(
-        2016, 12, 20, 21, 13, 47,
-        tzinfo=pytz.UTC)
+        2016, 12, 20, 21, 13, 47, tzinfo=pytz.UTC
+    )
     stamp = datetime_helpers.DatetimeWithNanoseconds.from_rfc3339(timestamp)
-    assert (stamp == expected)
+    assert stamp == expected
 
 
 def test_datetimewithnanos_from_rfc3339_w_partial_precision():
-    timestamp = '2016-12-20T21:13:47.1Z'
+    timestamp = "2016-12-20T21:13:47.1Z"
     expected = datetime_helpers.DatetimeWithNanoseconds(
-        2016, 12, 20, 21, 13, 47,
-        microsecond=100000,
-        tzinfo=pytz.UTC)
+        2016, 12, 20, 21, 13, 47, microsecond=100000, tzinfo=pytz.UTC
+    )
     stamp = datetime_helpers.DatetimeWithNanoseconds.from_rfc3339(timestamp)
     assert stamp == expected
 
 
 def test_datetimewithnanos_from_rfc3339_w_full_precision():
-    timestamp = '2016-12-20T21:13:47.123456789Z'
+    timestamp = "2016-12-20T21:13:47.123456789Z"
     expected = datetime_helpers.DatetimeWithNanoseconds(
-        2016, 12, 20, 21, 13, 47,
-        nanosecond=123456789,
-        tzinfo=pytz.UTC)
+        2016, 12, 20, 21, 13, 47, nanosecond=123456789, tzinfo=pytz.UTC
+    )
     stamp = datetime_helpers.DatetimeWithNanoseconds.from_rfc3339(timestamp)
     assert stamp == expected
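
The parametrize rewrite in the datetime hunks (the rfc3339 truncation cases) is the clearest example of how Black lays out data-heavy decorators: the parameter-name string and the case list each get their own line, every case keeps a trailing comma, and the decorator's closing parenthesis returns to the indentation of the @. A self-contained, hypothetical test in the same shape:

    # Hedged sketch: a hypothetical parametrized test (not from this change),
    # laid out the way Black formats the truncation cases above.
    import pytest

    @pytest.mark.parametrize(
        "raw, expected_micros",
        [
            ("1", 100000),
            ("12", 120000),
            ("123", 123000),
        ],
    )
    def test_pad_fraction_to_micros(raw, expected_micros):
        assert int(raw.ljust(6, "0")) == expected_micros
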
diff --git a/tests/unit/test_exceptions.py b/tests/unit/test_exceptions.py
index 25b389f..040ac8a 100644
--- a/tests/unit/test_exceptions.py
+++ b/tests/unit/test_exceptions.py
@@ -23,34 +23,33 @@
 
 
 def test_create_google_cloud_error():
-    exception = exceptions.GoogleAPICallError('Testing')
+    exception = exceptions.GoogleAPICallError("Testing")
     exception.code = 600
-    assert str(exception) == '600 Testing'
-    assert exception.message == 'Testing'
+    assert str(exception) == "600 Testing"
+    assert exception.message == "Testing"
     assert exception.errors == []
     assert exception.response is None
 
 
 def test_create_google_cloud_error_with_args():
     error = {
-        'domain': 'global',
-        'location': 'test',
-        'locationType': 'testing',
-        'message': 'Testing',
-        'reason': 'test',
+        "domain": "global",
+        "location": "test",
+        "locationType": "testing",
+        "message": "Testing",
+        "reason": "test",
     }
     response = mock.sentinel.response
-    exception = exceptions.GoogleAPICallError(
-        'Testing', [error], response=response)
+    exception = exceptions.GoogleAPICallError("Testing", [error], response=response)
     exception.code = 600
-    assert str(exception) == '600 Testing'
-    assert exception.message == 'Testing'
+    assert str(exception) == "600 Testing"
+    assert exception.message == "Testing"
     assert exception.errors == [error]
     assert exception.response == response
 
 
 def test_from_http_status():
-    message = 'message'
+    message = "message"
     exception = exceptions.from_http_status(http_client.NOT_FOUND, message)
     assert exception.code == http_client.NOT_FOUND
     assert exception.message == message
@@ -58,11 +57,12 @@
 
 
 def test_from_http_status_with_errors_and_response():
-    message = 'message'
-    errors = ['1', '2']
+    message = "message"
+    errors = ["1", "2"]
     response = mock.sentinel.response
     exception = exceptions.from_http_status(
-        http_client.NOT_FOUND, message, errors=errors, response=response)
+        http_client.NOT_FOUND, message, errors=errors, response=response
+    )
 
     assert isinstance(exception, exceptions.NotFound)
     assert exception.code == http_client.NOT_FOUND
@@ -72,7 +72,7 @@
 
 
 def test_from_http_status_unknown_code():
-    message = 'message'
+    message = "message"
     status_code = 156
     exception = exceptions.from_http_status(status_code, message)
     assert exception.code == status_code
@@ -84,7 +84,8 @@
     response._content = content
     response.status_code = http_client.NOT_FOUND
     response.request = requests.Request(
-        method='POST', url='https://example.com').prepare()
+        method="POST", url="https://example.com"
+    ).prepare()
     return response
 
 
@@ -95,66 +96,63 @@
 
     assert isinstance(exception, exceptions.NotFound)
     assert exception.code == http_client.NOT_FOUND
-    assert exception.message == 'POST https://example.com/: unknown error'
+    assert exception.message == "POST https://example.com/: unknown error"
     assert exception.response == response
 
 
 def test_from_http_response_text_content():
-    response = make_response(b'message')
+    response = make_response(b"message")
 
     exception = exceptions.from_http_response(response)
 
     assert isinstance(exception, exceptions.NotFound)
     assert exception.code == http_client.NOT_FOUND
-    assert exception.message == 'POST https://example.com/: message'
+    assert exception.message == "POST https://example.com/: message"
 
 
 def test_from_http_response_json_content():
-    response = make_response(json.dumps({
-        'error': {
-            'message': 'json message',
-            'errors': ['1', '2']
-        }
-    }).encode('utf-8'))
+    response = make_response(
+        json.dumps({"error": {"message": "json message", "errors": ["1", "2"]}}).encode(
+            "utf-8"
+        )
+    )
 
     exception = exceptions.from_http_response(response)
 
     assert isinstance(exception, exceptions.NotFound)
     assert exception.code == http_client.NOT_FOUND
-    assert exception.message == 'POST https://example.com/: json message'
-    assert exception.errors == ['1', '2']
+    assert exception.message == "POST https://example.com/: json message"
+    assert exception.errors == ["1", "2"]
 
 
 def test_from_http_response_bad_json_content():
-    response = make_response(json.dumps({'meep': 'moop'}).encode('utf-8'))
+    response = make_response(json.dumps({"meep": "moop"}).encode("utf-8"))
 
     exception = exceptions.from_http_response(response)
 
     assert isinstance(exception, exceptions.NotFound)
     assert exception.code == http_client.NOT_FOUND
-    assert exception.message == 'POST https://example.com/: unknown error'
+    assert exception.message == "POST https://example.com/: unknown error"
 
 
 def test_from_http_response_json_unicode_content():
-    response = make_response(json.dumps({
-        'error': {
-            'message': u'\u2019 message',
-            'errors': ['1', '2']
-        }
-    }).encode('utf-8'))
+    response = make_response(
+        json.dumps(
+            {"error": {"message": u"\u2019 message", "errors": ["1", "2"]}}
+        ).encode("utf-8")
+    )
 
     exception = exceptions.from_http_response(response)
 
     assert isinstance(exception, exceptions.NotFound)
     assert exception.code == http_client.NOT_FOUND
-    assert exception.message == u'POST https://example.com/: \u2019 message'
-    assert exception.errors == ['1', '2']
+    assert exception.message == u"POST https://example.com/: \u2019 message"
+    assert exception.errors == ["1", "2"]
 
 
 def test_from_grpc_status():
-    message = 'message'
-    exception = exceptions.from_grpc_status(
-        grpc.StatusCode.OUT_OF_RANGE, message)
+    message = "message"
+    exception = exceptions.from_grpc_status(grpc.StatusCode.OUT_OF_RANGE, message)
     assert isinstance(exception, exceptions.BadRequest)
     assert isinstance(exception, exceptions.OutOfRange)
     assert exception.code == http_client.BAD_REQUEST
@@ -164,12 +162,12 @@
 
 
 def test_from_grpc_status_with_errors_and_response():
-    message = 'message'
+    message = "message"
     response = mock.sentinel.response
-    errors = ['1', '2']
+    errors = ["1", "2"]
     exception = exceptions.from_grpc_status(
-        grpc.StatusCode.OUT_OF_RANGE, message,
-        errors=errors, response=response)
+        grpc.StatusCode.OUT_OF_RANGE, message, errors=errors, response=response
+    )
 
     assert isinstance(exception, exceptions.OutOfRange)
     assert exception.message == message
@@ -178,15 +176,14 @@
 
 
 def test_from_grpc_status_unknown_code():
-    message = 'message'
-    exception = exceptions.from_grpc_status(
-        grpc.StatusCode.OK, message)
+    message = "message"
+    exception = exceptions.from_grpc_status(grpc.StatusCode.OK, message)
     assert exception.grpc_status_code == grpc.StatusCode.OK
     assert exception.message == message
 
 
 def test_from_grpc_error():
-    message = 'message'
+    message = "message"
     error = mock.create_autospec(grpc.Call, instance=True)
     error.code.return_value = grpc.StatusCode.INVALID_ARGUMENT
     error.details.return_value = message
@@ -203,7 +200,7 @@
 
 
 def test_from_grpc_error_non_call():
-    message = 'message'
+    message = "message"
     error = mock.create_autospec(grpc.RpcError, instance=True)
     error.__str__.return_value = message
 
diff --git a/tests/unit/test_general_helpers.py b/tests/unit/test_general_helpers.py
index 2825193..027d489 100644
--- a/tests/unit/test_general_helpers.py
+++ b/tests/unit/test_general_helpers.py
@@ -18,7 +18,6 @@
 
 
 def test_wraps_normal_func():
-
     def func():
         return 42
 
@@ -30,7 +29,6 @@
 
 
 def test_wraps_partial():
-
     def func():
         return 42
 
diff --git a/tests/unit/test_grpc_helpers.py b/tests/unit/test_grpc_helpers.py
index b91847c..c37c3ee 100644
--- a/tests/unit/test_grpc_helpers.py
+++ b/tests/unit/test_grpc_helpers.py
@@ -23,22 +23,22 @@
 
 
 def test__patch_callable_name():
-    callable = mock.Mock(spec=['__class__'])
-    callable.__class__ = mock.Mock(spec=['__name__'])
-    callable.__class__.__name__ = 'TestCallable'
+    callable = mock.Mock(spec=["__class__"])
+    callable.__class__ = mock.Mock(spec=["__name__"])
+    callable.__class__.__name__ = "TestCallable"
 
     grpc_helpers._patch_callable_name(callable)
 
-    assert callable.__name__ == 'TestCallable'
+    assert callable.__name__ == "TestCallable"
 
 
 def test__patch_callable_name_no_op():
-    callable = mock.Mock(spec=['__name__'])
-    callable.__name__ = 'test_callable'
+    callable = mock.Mock(spec=["__name__"])
+    callable.__name__ = "test_callable"
 
     grpc_helpers._patch_callable_name(callable)
 
-    assert callable.__name__ == 'test_callable'
+    assert callable.__name__ == "test_callable"
 
 
 class RpcErrorImpl(grpc.RpcError, grpc.Call):
@@ -55,35 +55,34 @@
 
 def test_wrap_unary_errors():
     grpc_error = RpcErrorImpl(grpc.StatusCode.INVALID_ARGUMENT)
-    callable_ = mock.Mock(spec=['__call__'], side_effect=grpc_error)
+    callable_ = mock.Mock(spec=["__call__"], side_effect=grpc_error)
 
     wrapped_callable = grpc_helpers._wrap_unary_errors(callable_)
 
     with pytest.raises(exceptions.InvalidArgument) as exc_info:
-        wrapped_callable(1, 2, three='four')
+        wrapped_callable(1, 2, three="four")
 
-    callable_.assert_called_once_with(1, 2, three='four')
+    callable_.assert_called_once_with(1, 2, three="four")
     assert exc_info.value.response == grpc_error
 
 
 def test_wrap_stream_okay():
     expected_responses = [1, 2, 3]
-    callable_ = mock.Mock(spec=[
-        '__call__'], return_value=iter(expected_responses))
+    callable_ = mock.Mock(spec=["__call__"], return_value=iter(expected_responses))
 
     wrapped_callable = grpc_helpers._wrap_stream_errors(callable_)
 
-    got_iterator = wrapped_callable(1, 2, three='four')
+    got_iterator = wrapped_callable(1, 2, three="four")
 
     responses = list(got_iterator)
 
-    callable_.assert_called_once_with(1, 2, three='four')
+    callable_.assert_called_once_with(1, 2, three="four")
     assert responses == expected_responses
 
 
 def test_wrap_stream_iterable_iterface():
     response_iter = mock.create_autospec(grpc.Call, instance=True)
-    callable_ = mock.Mock(spec=['__call__'], return_value=response_iter)
+    callable_ = mock.Mock(spec=["__call__"], return_value=response_iter)
 
     wrapped_callable = grpc_helpers._wrap_stream_errors(callable_)
 
@@ -119,14 +118,14 @@
 
 def test_wrap_stream_errors_invocation():
     grpc_error = RpcErrorImpl(grpc.StatusCode.INVALID_ARGUMENT)
-    callable_ = mock.Mock(spec=['__call__'], side_effect=grpc_error)
+    callable_ = mock.Mock(spec=["__call__"], side_effect=grpc_error)
 
     wrapped_callable = grpc_helpers._wrap_stream_errors(callable_)
 
     with pytest.raises(exceptions.InvalidArgument) as exc_info:
-        wrapped_callable(1, 2, three='four')
+        wrapped_callable(1, 2, three="four")
 
-    callable_.assert_called_once_with(1, 2, three='four')
+    callable_.assert_called_once_with(1, 2, three="four")
     assert exc_info.value.response == grpc_error
 
 
@@ -143,20 +142,20 @@
 def test_wrap_stream_errors_iterator():
     grpc_error = RpcErrorImpl(grpc.StatusCode.UNAVAILABLE)
     response_iter = RpcResponseIteratorImpl(grpc_error)
-    callable_ = mock.Mock(spec=['__call__'], return_value=response_iter)
+    callable_ = mock.Mock(spec=["__call__"], return_value=response_iter)
 
     wrapped_callable = grpc_helpers._wrap_stream_errors(callable_)
 
-    got_iterator = wrapped_callable(1, 2, three='four')
+    got_iterator = wrapped_callable(1, 2, three="four")
 
     with pytest.raises(exceptions.ServiceUnavailable) as exc_info:
         next(got_iterator)
 
-    callable_.assert_called_once_with(1, 2, three='four')
+    callable_.assert_called_once_with(1, 2, three="four")
     assert exc_info.value.response == grpc_error
 
 
-@mock.patch('google.api_core.grpc_helpers._wrap_unary_errors')
+@mock.patch("google.api_core.grpc_helpers._wrap_unary_errors")
 def test_wrap_errors_non_streaming(wrap_unary_errors):
     callable_ = mock.create_autospec(grpc.UnaryUnaryMultiCallable)
 
@@ -166,7 +165,7 @@
     wrap_unary_errors.assert_called_once_with(callable_)
 
 
-@mock.patch('google.api_core.grpc_helpers._wrap_stream_errors')
+@mock.patch("google.api_core.grpc_helpers._wrap_stream_errors")
 def test_wrap_errors_streaming(wrap_stream_errors):
     callable_ = mock.create_autospec(grpc.UnaryStreamMultiCallable)
 
@@ -176,36 +175,36 @@
     wrap_stream_errors.assert_called_once_with(callable_)
 
 
-@mock.patch('grpc.composite_channel_credentials')
+@mock.patch("grpc.composite_channel_credentials")
 @mock.patch(
-    'google.auth.default',
-    return_value=(mock.sentinel.credentials, mock.sentinel.projet))
-@mock.patch('grpc.secure_channel')
-def test_create_channel_implicit(
-        grpc_secure_channel, default, composite_creds_call):
-    target = 'example.com:443'
+    "google.auth.default",
+    return_value=(mock.sentinel.credentials, mock.sentinel.projet),
+)
+@mock.patch("grpc.secure_channel")
+def test_create_channel_implicit(grpc_secure_channel, default, composite_creds_call):
+    target = "example.com:443"
     composite_creds = composite_creds_call.return_value
 
     channel = grpc_helpers.create_channel(target)
 
     assert channel is grpc_secure_channel.return_value
     default.assert_called_once_with(scopes=None)
-    if (grpc_helpers.HAS_GRPC_GCP):
-        grpc_secure_channel.assert_called_once_with(
-            target, composite_creds, None)
+    if grpc_helpers.HAS_GRPC_GCP:
+        grpc_secure_channel.assert_called_once_with(target, composite_creds, None)
     else:
-        grpc_secure_channel.assert_called_once_with(
-            target, composite_creds)
+        grpc_secure_channel.assert_called_once_with(target, composite_creds)
 
 
-@mock.patch('grpc.composite_channel_credentials')
+@mock.patch("grpc.composite_channel_credentials")
 @mock.patch(
-    'google.auth.default',
-    return_value=(mock.sentinel.credentials, mock.sentinel.projet))
-@mock.patch('grpc.secure_channel')
+    "google.auth.default",
+    return_value=(mock.sentinel.credentials, mock.sentinel.projet),
+)
+@mock.patch("grpc.secure_channel")
 def test_create_channel_implicit_with_ssl_creds(
-        grpc_secure_channel, default, composite_creds_call):
-    target = 'example.com:443'
+    grpc_secure_channel, default, composite_creds_call
+):
+    target = "example.com:443"
 
     ssl_creds = grpc.ssl_channel_credentials()
 
@@ -214,147 +213,127 @@
     default.assert_called_once_with(scopes=None)
     composite_creds_call.assert_called_once_with(ssl_creds, mock.ANY)
     composite_creds = composite_creds_call.return_value
-    if (grpc_helpers.HAS_GRPC_GCP):
-        grpc_secure_channel.assert_called_once_with(
-            target, composite_creds, None)
+    if grpc_helpers.HAS_GRPC_GCP:
+        grpc_secure_channel.assert_called_once_with(target, composite_creds, None)
     else:
-        grpc_secure_channel.assert_called_once_with(
-            target, composite_creds)
+        grpc_secure_channel.assert_called_once_with(target, composite_creds)
 
 
-@mock.patch('grpc.composite_channel_credentials')
+@mock.patch("grpc.composite_channel_credentials")
 @mock.patch(
-    'google.auth.default',
-    return_value=(mock.sentinel.credentials, mock.sentinel.projet))
-@mock.patch('grpc.secure_channel')
+    "google.auth.default",
+    return_value=(mock.sentinel.credentials, mock.sentinel.projet),
+)
+@mock.patch("grpc.secure_channel")
 def test_create_channel_implicit_with_scopes(
-        grpc_secure_channel, default, composite_creds_call):
-    target = 'example.com:443'
+    grpc_secure_channel, default, composite_creds_call
+):
+    target = "example.com:443"
     composite_creds = composite_creds_call.return_value
 
-    channel = grpc_helpers.create_channel(target, scopes=['one', 'two'])
+    channel = grpc_helpers.create_channel(target, scopes=["one", "two"])
 
     assert channel is grpc_secure_channel.return_value
-    default.assert_called_once_with(scopes=['one', 'two'])
-    if (grpc_helpers.HAS_GRPC_GCP):
-        grpc_secure_channel.assert_called_once_with(
-            target, composite_creds, None)
+    default.assert_called_once_with(scopes=["one", "two"])
+    if grpc_helpers.HAS_GRPC_GCP:
+        grpc_secure_channel.assert_called_once_with(target, composite_creds, None)
     else:
-        grpc_secure_channel.assert_called_once_with(
-            target, composite_creds)
+        grpc_secure_channel.assert_called_once_with(target, composite_creds)
 
 
-@mock.patch('grpc.composite_channel_credentials')
-@mock.patch('google.auth.credentials.with_scopes_if_required')
-@mock.patch('grpc.secure_channel')
-def test_create_channel_explicit(
-        grpc_secure_channel, auth_creds, composite_creds_call):
-    target = 'example.com:443'
+@mock.patch("grpc.composite_channel_credentials")
+@mock.patch("google.auth.credentials.with_scopes_if_required")
+@mock.patch("grpc.secure_channel")
+def test_create_channel_explicit(grpc_secure_channel, auth_creds, composite_creds_call):
+    target = "example.com:443"
     composite_creds = composite_creds_call.return_value
 
-    channel = grpc_helpers.create_channel(
-        target, credentials=mock.sentinel.credentials)
+    channel = grpc_helpers.create_channel(target, credentials=mock.sentinel.credentials)
 
     auth_creds.assert_called_once_with(mock.sentinel.credentials, None)
     assert channel is grpc_secure_channel.return_value
-    if (grpc_helpers.HAS_GRPC_GCP):
-        grpc_secure_channel.assert_called_once_with(
-            target, composite_creds, None)
+    if grpc_helpers.HAS_GRPC_GCP:
+        grpc_secure_channel.assert_called_once_with(target, composite_creds, None)
     else:
-        grpc_secure_channel.assert_called_once_with(
-            target, composite_creds)
+        grpc_secure_channel.assert_called_once_with(target, composite_creds)
 
 
-@mock.patch('grpc.composite_channel_credentials')
-@mock.patch('grpc.secure_channel')
-def test_create_channel_explicit_scoped(
-        grpc_secure_channel, composite_creds_call):
-    target = 'example.com:443'
-    scopes = ['1', '2']
+@mock.patch("grpc.composite_channel_credentials")
+@mock.patch("grpc.secure_channel")
+def test_create_channel_explicit_scoped(grpc_secure_channel, composite_creds_call):
+    target = "example.com:443"
+    scopes = ["1", "2"]
     composite_creds = composite_creds_call.return_value
 
-    credentials = mock.create_autospec(
-        google.auth.credentials.Scoped, instance=True)
+    credentials = mock.create_autospec(google.auth.credentials.Scoped, instance=True)
     credentials.requires_scopes = True
 
     channel = grpc_helpers.create_channel(
-        target,
-        credentials=credentials,
-        scopes=scopes)
+        target, credentials=credentials, scopes=scopes
+    )
 
     credentials.with_scopes.assert_called_once_with(scopes)
     assert channel is grpc_secure_channel.return_value
-    if (grpc_helpers.HAS_GRPC_GCP):
-        grpc_secure_channel.assert_called_once_with(
-            target, composite_creds, None)
+    if grpc_helpers.HAS_GRPC_GCP:
+        grpc_secure_channel.assert_called_once_with(target, composite_creds, None)
     else:
-        grpc_secure_channel.assert_called_once_with(
-            target, composite_creds)
+        grpc_secure_channel.assert_called_once_with(target, composite_creds)
 
 
-@pytest.mark.skipif(not grpc_helpers.HAS_GRPC_GCP,
-                    reason='grpc_gcp module not available')
-@mock.patch('grpc_gcp.secure_channel')
+@pytest.mark.skipif(
+    not grpc_helpers.HAS_GRPC_GCP, reason="grpc_gcp module not available"
+)
+@mock.patch("grpc_gcp.secure_channel")
 def test_create_channel_with_grpc_gcp(grpc_gcp_secure_channel):
-    target = 'example.com:443'
-    scopes = ['test_scope']
+    target = "example.com:443"
+    scopes = ["test_scope"]
 
-    credentials = mock.create_autospec(
-        google.auth.credentials.Scoped, instance=True)
+    credentials = mock.create_autospec(google.auth.credentials.Scoped, instance=True)
     credentials.requires_scopes = True
 
-    grpc_helpers.create_channel(
-        target,
-        credentials=credentials,
-        scopes=scopes)
+    grpc_helpers.create_channel(target, credentials=credentials, scopes=scopes)
     grpc_gcp_secure_channel.assert_called()
     credentials.with_scopes.assert_called_once_with(scopes)
 
 
-@pytest.mark.skipif(grpc_helpers.HAS_GRPC_GCP,
-                    reason='grpc_gcp module not available')
-@mock.patch('grpc.secure_channel')
+@pytest.mark.skipif(grpc_helpers.HAS_GRPC_GCP, reason="grpc_gcp module not available")
+@mock.patch("grpc.secure_channel")
 def test_create_channel_without_grpc_gcp(grpc_secure_channel):
-    target = 'example.com:443'
-    scopes = ['test_scope']
+    target = "example.com:443"
+    scopes = ["test_scope"]
 
-    credentials = mock.create_autospec(
-        google.auth.credentials.Scoped, instance=True)
+    credentials = mock.create_autospec(google.auth.credentials.Scoped, instance=True)
     credentials.requires_scopes = True
 
-    grpc_helpers.create_channel(
-        target,
-        credentials=credentials,
-        scopes=scopes)
+    grpc_helpers.create_channel(target, credentials=credentials, scopes=scopes)
     grpc_secure_channel.assert_called()
     credentials.with_scopes.assert_called_once_with(scopes)
 
 
 class TestChannelStub(object):
-
     def test_single_response(self):
         channel = grpc_helpers.ChannelStub()
         stub = operations_pb2.OperationsStub(channel)
-        expected_request = operations_pb2.GetOperationRequest(name='meep')
-        expected_response = operations_pb2.Operation(name='moop')
+        expected_request = operations_pb2.GetOperationRequest(name="meep")
+        expected_response = operations_pb2.Operation(name="moop")
 
         channel.GetOperation.response = expected_response
 
         response = stub.GetOperation(expected_request)
 
         assert response == expected_response
-        assert channel.requests == [('GetOperation', expected_request)]
+        assert channel.requests == [("GetOperation", expected_request)]
         assert channel.GetOperation.requests == [expected_request]
 
     def test_no_response(self):
         channel = grpc_helpers.ChannelStub()
         stub = operations_pb2.OperationsStub(channel)
-        expected_request = operations_pb2.GetOperationRequest(name='meep')
+        expected_request = operations_pb2.GetOperationRequest(name="meep")
 
         with pytest.raises(ValueError) as exc_info:
             stub.GetOperation(expected_request)
 
-        assert exc_info.match('GetOperation')
+        assert exc_info.match("GetOperation")
 
     def test_missing_method(self):
         channel = grpc_helpers.ChannelStub()
@@ -365,7 +344,7 @@
     def test_exception_response(self):
         channel = grpc_helpers.ChannelStub()
         stub = operations_pb2.OperationsStub(channel)
-        expected_request = operations_pb2.GetOperationRequest(name='meep')
+        expected_request = operations_pb2.GetOperationRequest(name="meep")
 
         channel.GetOperation.response = RuntimeError()
 
@@ -375,11 +354,10 @@
     def test_callable_response(self):
         channel = grpc_helpers.ChannelStub()
         stub = operations_pb2.OperationsStub(channel)
-        expected_request = operations_pb2.GetOperationRequest(name='meep')
-        expected_response = operations_pb2.Operation(name='moop')
+        expected_request = operations_pb2.GetOperationRequest(name="meep")
+        expected_response = operations_pb2.Operation(name="moop")
 
-        on_get_operation = mock.Mock(
-            spec=('__call__',), return_value=expected_response)
+        on_get_operation = mock.Mock(spec=("__call__",), return_value=expected_response)
 
         channel.GetOperation.response = on_get_operation
 
@@ -391,11 +369,11 @@
     def test_multiple_responses(self):
         channel = grpc_helpers.ChannelStub()
         stub = operations_pb2.OperationsStub(channel)
-        expected_request = operations_pb2.GetOperationRequest(name='meep')
+        expected_request = operations_pb2.GetOperationRequest(name="meep")
         expected_responses = [
-            operations_pb2.Operation(name='foo'),
-            operations_pb2.Operation(name='bar'),
-            operations_pb2.Operation(name='baz'),
+            operations_pb2.Operation(name="foo"),
+            operations_pb2.Operation(name="bar"),
+            operations_pb2.Operation(name="baz"),
         ]
 
         channel.GetOperation.responses = iter(expected_responses)
@@ -407,7 +385,7 @@
         assert response1 == expected_responses[0]
         assert response2 == expected_responses[1]
         assert response3 == expected_responses[2]
-        assert channel.requests == [('GetOperation', expected_request)] * 3
+        assert channel.requests == [("GetOperation", expected_request)] * 3
         assert channel.GetOperation.requests == [expected_request] * 3
 
         with pytest.raises(StopIteration):
@@ -425,45 +403,49 @@
     def test_call_info(self):
         channel = grpc_helpers.ChannelStub()
         stub = operations_pb2.OperationsStub(channel)
-        expected_request = operations_pb2.GetOperationRequest(name='meep')
-        expected_response = operations_pb2.Operation(name='moop')
-        expected_metadata = [('red', 'blue'), ('two', 'shoe')]
+        expected_request = operations_pb2.GetOperationRequest(name="meep")
+        expected_response = operations_pb2.Operation(name="moop")
+        expected_metadata = [("red", "blue"), ("two", "shoe")]
         expected_credentials = mock.sentinel.credentials
         channel.GetOperation.response = expected_response
 
         response = stub.GetOperation(
-            expected_request, timeout=42, metadata=expected_metadata,
-            credentials=expected_credentials)
+            expected_request,
+            timeout=42,
+            metadata=expected_metadata,
+            credentials=expected_credentials,
+        )
 
         assert response == expected_response
-        assert channel.requests == [('GetOperation', expected_request)]
+        assert channel.requests == [("GetOperation", expected_request)]
         assert channel.GetOperation.calls == [
-            (expected_request, 42, expected_metadata, expected_credentials)]
+            (expected_request, 42, expected_metadata, expected_credentials)
+        ]
 
     def test_unary_unary(self):
         channel = grpc_helpers.ChannelStub()
-        method_name = 'GetOperation'
+        method_name = "GetOperation"
         callable_stub = channel.unary_unary(method_name)
         assert callable_stub._method == method_name
         assert callable_stub._channel == channel
 
     def test_unary_stream(self):
         channel = grpc_helpers.ChannelStub()
-        method_name = 'GetOperation'
+        method_name = "GetOperation"
         callable_stub = channel.unary_stream(method_name)
         assert callable_stub._method == method_name
         assert callable_stub._channel == channel
 
     def test_stream_unary(self):
         channel = grpc_helpers.ChannelStub()
-        method_name = 'GetOperation'
+        method_name = "GetOperation"
         callable_stub = channel.stream_unary(method_name)
         assert callable_stub._method == method_name
         assert callable_stub._channel == channel
 
     def test_stream_stream(self):
         channel = grpc_helpers.ChannelStub()
-        method_name = 'GetOperation'
+        method_name = "GetOperation"
         callable_stub = channel.stream_stream(method_name)
         assert callable_stub._method == method_name
         assert callable_stub._channel == channel
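
Two smaller rewrites recur through the grpc_helpers hunks: multi-argument @mock.patch decorators are wrapped with one argument per line plus a trailing comma, and the if (grpc_helpers.HAS_GRPC_GCP): checks lose their parentheses because they only wrapped a single name. A hypothetical sketch of both shapes, using a stand-in flag and a standard-library patch target:

    # Hedged sketch (hypothetical flag and patch target), mirroring the two
    # rewrites above: a decorator wrapped one-argument-per-line with a trailing
    # comma, and a bare `if` condition with the redundant parentheses removed.
    from unittest import mock

    HAS_EXTRA_TRANSPORT = False  # stand-in for a feature flag like HAS_GRPC_GCP

    @mock.patch(
        "os.path.expanduser",
        return_value=(mock.sentinel.credentials, mock.sentinel.project_id),
    )
    def exercise(expanduser):
        # `if (HAS_EXTRA_TRANSPORT):` becomes the bare form below.
        if HAS_EXTRA_TRANSPORT:
            return expanduser("~"), True
        return expanduser("~"), False

    assert exercise() == (
        (mock.sentinel.credentials, mock.sentinel.project_id),
        False,
    )
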
diff --git a/tests/unit/test_operation.py b/tests/unit/test_operation.py
index 211fea6..ceaec82 100644
--- a/tests/unit/test_operation.py
+++ b/tests/unit/test_operation.py
@@ -22,14 +22,13 @@
 from google.rpc import code_pb2
 from google.rpc import status_pb2
 
-TEST_OPERATION_NAME = 'test/operation'
+TEST_OPERATION_NAME = "test/operation"
 
 
 def make_operation_proto(
-        name=TEST_OPERATION_NAME, metadata=None, response=None,
-        error=None, **kwargs):
-    operation_proto = operations_pb2.Operation(
-        name=name, **kwargs)
+    name=TEST_OPERATION_NAME, metadata=None, response=None, error=None, **kwargs
+):
+    operation_proto = operations_pb2.Operation(name=name, **kwargs)
 
     if metadata is not None:
         operation_proto.metadata.Pack(metadata)
@@ -47,16 +46,16 @@
     if client_operations_responses is None:
         client_operations_responses = [make_operation_proto()]
 
-    refresh = mock.Mock(
-        spec=['__call__'], side_effect=client_operations_responses)
+    refresh = mock.Mock(spec=["__call__"], side_effect=client_operations_responses)
     refresh.responses = client_operations_responses
-    cancel = mock.Mock(spec=['__call__'])
+    cancel = mock.Mock(spec=["__call__"])
     operation_future = operation.Operation(
         client_operations_responses[0],
         refresh,
         cancel,
         result_type=struct_pb2.Struct,
-        metadata_type=struct_pb2.Struct)
+        metadata_type=struct_pb2.Struct,
+    )
 
     return operation_future, refresh, cancel
 
@@ -74,7 +73,8 @@
 def test_metadata():
     expected_metadata = struct_pb2.Struct()
     future, _, _ = make_operation_future(
-        [make_operation_proto(metadata=expected_metadata)])
+        [make_operation_proto(metadata=expected_metadata)]
+    )
 
     assert future.metadata == expected_metadata
 
@@ -84,8 +84,9 @@
         make_operation_proto(),
         # Second response indicates that the operation was cancelled.
         make_operation_proto(
-            done=True,
-            error=status_pb2.Status(code=code_pb2.CANCELLED))]
+            done=True, error=status_pb2.Status(code=code_pb2.CANCELLED)
+        ),
+    ]
     future, _, cancel = make_operation_future(responses)
 
     assert future.cancel()
@@ -102,7 +103,8 @@
     responses = [
         make_operation_proto(),
         # Second operation response includes the result.
-        make_operation_proto(done=True, response=expected_result)]
+        make_operation_proto(done=True, response=expected_result),
+    ]
     future, _, _ = make_operation_future(responses)
 
     result = future.result()
@@ -112,40 +114,42 @@
 
 
 def test_exception():
-    expected_exception = status_pb2.Status(message='meep')
+    expected_exception = status_pb2.Status(message="meep")
     responses = [
         make_operation_proto(),
         # Second operation response includes the error.
-        make_operation_proto(done=True, error=expected_exception)]
+        make_operation_proto(done=True, error=expected_exception),
+    ]
     future, _, _ = make_operation_future(responses)
 
     exception = future.exception()
 
-    assert expected_exception.message in '{!r}'.format(exception)
+    assert expected_exception.message in "{!r}".format(exception)
 
 
 def test_unexpected_result():
     responses = [
         make_operation_proto(),
         # Second operation response is done, but has no error or response.
-        make_operation_proto(done=True)]
+        make_operation_proto(done=True),
+    ]
     future, _, _ = make_operation_future(responses)
 
     exception = future.exception()
 
-    assert 'Unexpected state' in '{!r}'.format(exception)
+    assert "Unexpected state" in "{!r}".format(exception)
 
 
 def test__refresh_http():
-    api_request = mock.Mock(
-        return_value={'name': TEST_OPERATION_NAME, 'done': True})
+    api_request = mock.Mock(return_value={"name": TEST_OPERATION_NAME, "done": True})
 
     result = operation._refresh_http(api_request, TEST_OPERATION_NAME)
 
     assert result.name == TEST_OPERATION_NAME
     assert result.done is True
     api_request.assert_called_once_with(
-        method='GET', path='operations/{}'.format(TEST_OPERATION_NAME))
+        method="GET", path="operations/{}".format(TEST_OPERATION_NAME)
+    )
 
 
 def test__cancel_http():
@@ -154,16 +158,17 @@
     operation._cancel_http(api_request, TEST_OPERATION_NAME)
 
     api_request.assert_called_once_with(
-        method='POST', path='operations/{}:cancel'.format(TEST_OPERATION_NAME))
+        method="POST", path="operations/{}:cancel".format(TEST_OPERATION_NAME)
+    )
 
 
 def test_from_http_json():
-    operation_json = {'name': TEST_OPERATION_NAME, 'done': True}
+    operation_json = {"name": TEST_OPERATION_NAME, "done": True}
     api_request = mock.sentinel.api_request
 
     future = operation.from_http_json(
-        operation_json, api_request, struct_pb2.Struct,
-        metadata_type=struct_pb2.Struct)
+        operation_json, api_request, struct_pb2.Struct, metadata_type=struct_pb2.Struct
+    )
 
     assert future._result_type == struct_pb2.Struct
     assert future._metadata_type == struct_pb2.Struct
@@ -172,25 +177,23 @@
 
 
 def test__refresh_grpc():
-    operations_stub = mock.Mock(spec=['GetOperation'])
+    operations_stub = mock.Mock(spec=["GetOperation"])
     expected_result = make_operation_proto(done=True)
     operations_stub.GetOperation.return_value = expected_result
 
     result = operation._refresh_grpc(operations_stub, TEST_OPERATION_NAME)
 
     assert result == expected_result
-    expected_request = operations_pb2.GetOperationRequest(
-        name=TEST_OPERATION_NAME)
+    expected_request = operations_pb2.GetOperationRequest(name=TEST_OPERATION_NAME)
     operations_stub.GetOperation.assert_called_once_with(expected_request)
 
 
 def test__cancel_grpc():
-    operations_stub = mock.Mock(spec=['CancelOperation'])
+    operations_stub = mock.Mock(spec=["CancelOperation"])
 
     operation._cancel_grpc(operations_stub, TEST_OPERATION_NAME)
 
-    expected_request = operations_pb2.CancelOperationRequest(
-        name=TEST_OPERATION_NAME)
+    expected_request = operations_pb2.CancelOperationRequest(name=TEST_OPERATION_NAME)
     operations_stub.CancelOperation.assert_called_once_with(expected_request)
 
 
@@ -199,8 +202,11 @@
     operations_stub = mock.sentinel.operations_stub
 
     future = operation.from_grpc(
-        operation_proto, operations_stub, struct_pb2.Struct,
-        metadata_type=struct_pb2.Struct)
+        operation_proto,
+        operations_stub,
+        struct_pb2.Struct,
+        metadata_type=struct_pb2.Struct,
+    )
 
     assert future._result_type == struct_pb2.Struct
     assert future._metadata_type == struct_pb2.Struct
@@ -211,11 +217,15 @@
 def test_from_gapic():
     operation_proto = make_operation_proto(done=True)
     operations_client = mock.create_autospec(
-        operations_v1.OperationsClient, instance=True)
+        operations_v1.OperationsClient, instance=True
+    )
 
     future = operation.from_gapic(
-        operation_proto, operations_client, struct_pb2.Struct,
-        metadata_type=struct_pb2.Struct)
+        operation_proto,
+        operations_client,
+        struct_pb2.Struct,
+        metadata_type=struct_pb2.Struct,
+    )
 
     assert future._result_type == struct_pb2.Struct
     assert future._metadata_type == struct_pb2.Struct
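
make_operation_proto in the operation hunks gets the same treatment applied to a def: once the signature passes 88 characters, the parameters move to one indented continuation line and the closing ): drops back to the definition's indentation. A hypothetical helper in the same shape, using plain dicts instead of protobuf messages:

    # Hedged sketch (hypothetical helper): a signature wrapped the way Black
    # rewrites make_operation_proto above.
    def make_fake_operation(
        name="test/operation", metadata=None, response=None, error=None, **kwargs
    ):
        return {
            "name": name,
            "metadata": metadata,
            "response": response,
            "error": error,
            **kwargs,
        }

    op = make_fake_operation(done=True)
    assert op["name"] == "test/operation"
    assert op["done"] is True
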
diff --git a/tests/unit/test_page_iterator.py b/tests/unit/test_page_iterator.py
index 172be03..f2845fb 100644
--- a/tests/unit/test_page_iterator.py
+++ b/tests/unit/test_page_iterator.py
@@ -26,7 +26,6 @@
 
 
 class TestPage(object):
-
     def test_constructor(self):
         parent = mock.sentinel.parent
         item_to_value = mock.sentinel.item_to_value
@@ -46,7 +45,8 @@
         parent = mock.sentinel.parent
 
         item_to_value = mock.Mock(
-            side_effect=lambda iterator, value: value, spec=['__call__'])
+            side_effect=lambda iterator, value: value, spec=["__call__"]
+        )
 
         page = page_iterator.Page(parent, (10, 11, 12), item_to_value)
         page._remaining = 100
@@ -76,15 +76,15 @@
 
 
 class TestIterator(object):
-
     def test_constructor(self):
         client = mock.sentinel.client
         item_to_value = mock.sentinel.item_to_value
-        token = 'ab13nceor03'
+        token = "ab13nceor03"
         max_results = 1337
 
         iterator = PageIteratorImpl(
-            client, item_to_value, page_token=token, max_results=max_results)
+            client, item_to_value, page_token=token, max_results=max_results
+        )
 
         assert not iterator._started
         assert iterator.client is client
@@ -116,7 +116,8 @@
     def test__page_iter_increment(self):
         iterator = PageIteratorImpl(None, None)
         page = page_iterator.Page(
-            iterator, ('item',), page_iterator._item_to_value_identity)
+            iterator, ("item",), page_iterator._item_to_value_identity
+        )
         iterator._next_page = mock.Mock(side_effect=[page, None])
 
         assert iterator.num_results == 0
@@ -146,9 +147,11 @@
         # Make pages from mock responses
         parent = mock.sentinel.parent
         page1 = page_iterator.Page(
-            parent, (item1, item2), page_iterator._item_to_value_identity)
+            parent, (item1, item2), page_iterator._item_to_value_identity
+        )
         page2 = page_iterator.Page(
-            parent, (item3,), page_iterator._item_to_value_identity)
+            parent, (item3,), page_iterator._item_to_value_identity
+        )
 
         iterator = PageIteratorImpl(None, None)
         iterator._next_page = mock.Mock(side_effect=[page1, page2, None])
@@ -203,19 +206,18 @@
 
 
 class TestHTTPIterator(object):
-
     def test_constructor(self):
         client = mock.sentinel.client
-        path = '/foo'
+        path = "/foo"
         iterator = page_iterator.HTTPIterator(
-            client, mock.sentinel.api_request,
-            path, mock.sentinel.item_to_value)
+            client, mock.sentinel.api_request, path, mock.sentinel.item_to_value
+        )
 
         assert not iterator._started
         assert iterator.client is client
         assert iterator.path == path
         assert iterator.item_to_value is mock.sentinel.item_to_value
-        assert iterator._items_key == 'items'
+        assert iterator._items_key == "items"
         assert iterator.max_results is None
         assert iterator.extra_params == {}
         assert iterator._page_start == page_iterator._do_nothing_page_start
@@ -225,7 +227,7 @@
         assert iterator.num_results == 0
 
     def test_constructor_w_extra_param_collision(self):
-        extra_params = {'pageToken': 'val'}
+        extra_params = {"pageToken": "val"}
 
         with pytest.raises(ValueError):
             page_iterator.HTTPIterator(
@@ -233,16 +235,20 @@
                 mock.sentinel.api_request,
                 mock.sentinel.path,
                 mock.sentinel.item_to_value,
-                extra_params=extra_params)
+                extra_params=extra_params,
+            )
 
     def test_iterate(self):
-        path = '/foo'
-        item1 = {'name': '1'}
-        item2 = {'name': '2'}
-        api_request = mock.Mock(return_value={'items': [item1, item2]})
+        path = "/foo"
+        item1 = {"name": "1"}
+        item2 = {"name": "2"}
+        api_request = mock.Mock(return_value={"items": [item1, item2]})
         iterator = page_iterator.HTTPIterator(
-            mock.sentinel.client, api_request, path=path,
-            item_to_value=page_iterator._item_to_value_identity)
+            mock.sentinel.client,
+            api_request,
+            path=path,
+            item_to_value=page_iterator._item_to_value_identity,
+        )
 
         assert iterator.num_results == 0
 
@@ -259,15 +265,15 @@
         with pytest.raises(StopIteration):
             six.next(items_iter)
 
-        api_request.assert_called_once_with(
-            method='GET', path=path, query_params={})
+        api_request.assert_called_once_with(method="GET", path=path, query_params={})
 
     def test__has_next_page_new(self):
         iterator = page_iterator.HTTPIterator(
             mock.sentinel.client,
             mock.sentinel.api_request,
             mock.sentinel.path,
-            mock.sentinel.item_to_value)
+            mock.sentinel.item_to_value,
+        )
 
         # The iterator should *always* indicate that it has a next page
         # when created so that it can fetch the initial page.
@@ -278,7 +284,8 @@
             mock.sentinel.client,
             mock.sentinel.api_request,
             mock.sentinel.path,
-            mock.sentinel.item_to_value)
+            mock.sentinel.item_to_value,
+        )
 
         iterator.page_number = 1
 
@@ -291,7 +298,8 @@
             mock.sentinel.client,
             mock.sentinel.api_request,
             mock.sentinel.path,
-            mock.sentinel.item_to_value)
+            mock.sentinel.item_to_value,
+        )
 
         iterator.page_number = 1
         iterator.next_page_token = mock.sentinel.token
@@ -307,7 +315,8 @@
             mock.sentinel.path,
             mock.sentinel.item_to_value,
             max_results=3,
-            page_token=mock.sentinel.token)
+            page_token=mock.sentinel.token,
+        )
 
         iterator.page_number = 1
 
@@ -324,7 +333,8 @@
             mock.sentinel.path,
             mock.sentinel.item_to_value,
             max_results=3,
-            page_token=mock.sentinel.token)
+            page_token=mock.sentinel.token,
+        )
 
         iterator.page_number = 1
         iterator.num_results = 3
@@ -339,7 +349,8 @@
             mock.sentinel.client,
             mock.sentinel.api_request,
             mock.sentinel.path,
-            mock.sentinel.item_to_value)
+            mock.sentinel.item_to_value,
+        )
 
         assert iterator._get_query_params() == {}
 
@@ -348,11 +359,11 @@
             mock.sentinel.client,
             mock.sentinel.api_request,
             mock.sentinel.path,
-            mock.sentinel.item_to_value)
-        iterator.next_page_token = 'token'
+            mock.sentinel.item_to_value,
+        )
+        iterator.next_page_token = "token"
 
-        assert iterator._get_query_params() == {
-            'pageToken': iterator.next_page_token}
+        assert iterator._get_query_params() == {"pageToken": iterator.next_page_token}
 
     def test__get_query_params_w_max_results(self):
         max_results = 3
@@ -361,60 +372,64 @@
             mock.sentinel.api_request,
             mock.sentinel.path,
             mock.sentinel.item_to_value,
-            max_results=max_results)
+            max_results=max_results,
+        )
 
         iterator.num_results = 1
         local_max = max_results - iterator.num_results
 
-        assert iterator._get_query_params() == {
-            'maxResults': local_max}
+        assert iterator._get_query_params() == {"maxResults": local_max}
 
     def test__get_query_params_extra_params(self):
-        extra_params = {'key': 'val'}
+        extra_params = {"key": "val"}
         iterator = page_iterator.HTTPIterator(
             mock.sentinel.client,
             mock.sentinel.api_request,
             mock.sentinel.path,
             mock.sentinel.item_to_value,
-            extra_params=extra_params)
+            extra_params=extra_params,
+        )
 
         assert iterator._get_query_params() == extra_params
 
     def test__get_next_page_response_with_post(self):
-        path = '/foo'
-        page_response = {'items': ['one', 'two']}
+        path = "/foo"
+        page_response = {"items": ["one", "two"]}
         api_request = mock.Mock(return_value=page_response)
         iterator = page_iterator.HTTPIterator(
-            mock.sentinel.client, api_request, path=path,
-            item_to_value=page_iterator._item_to_value_identity)
-        iterator._HTTP_METHOD = 'POST'
+            mock.sentinel.client,
+            api_request,
+            path=path,
+            item_to_value=page_iterator._item_to_value_identity,
+        )
+        iterator._HTTP_METHOD = "POST"
 
         response = iterator._get_next_page_response()
 
         assert response == page_response
 
-        api_request.assert_called_once_with(
-            method='POST', path=path, data={})
+        api_request.assert_called_once_with(method="POST", path=path, data={})
 
     def test__get_next_page_bad_http_method(self):
         iterator = page_iterator.HTTPIterator(
             mock.sentinel.client,
             mock.sentinel.api_request,
             mock.sentinel.path,
-            mock.sentinel.item_to_value)
-        iterator._HTTP_METHOD = 'NOT-A-VERB'
+            mock.sentinel.item_to_value,
+        )
+        iterator._HTTP_METHOD = "NOT-A-VERB"
 
         with pytest.raises(ValueError):
             iterator._get_next_page_response()
 
 
 class TestGRPCIterator(object):
-
     def test_constructor(self):
         client = mock.sentinel.client
-        items_field = 'items'
+        items_field = "items"
         iterator = page_iterator.GRPCIterator(
-            client, mock.sentinel.method, mock.sentinel.request, items_field)
+            client, mock.sentinel.method, mock.sentinel.request, items_field
+        )
 
         assert not iterator._started
         assert iterator.client is client
@@ -423,10 +438,14 @@
         assert iterator._method == mock.sentinel.method
         assert iterator._request == mock.sentinel.request
         assert iterator._items_field == items_field
-        assert (iterator._request_token_field ==
-                page_iterator.GRPCIterator._DEFAULT_REQUEST_TOKEN_FIELD)
-        assert (iterator._response_token_field ==
-                page_iterator.GRPCIterator._DEFAULT_RESPONSE_TOKEN_FIELD)
+        assert (
+            iterator._request_token_field
+            == page_iterator.GRPCIterator._DEFAULT_REQUEST_TOKEN_FIELD
+        )
+        assert (
+            iterator._response_token_field
+            == page_iterator.GRPCIterator._DEFAULT_RESPONSE_TOKEN_FIELD
+        )
         # Changing attributes.
         assert iterator.page_number == 0
         assert iterator.next_page_token is None
@@ -434,15 +453,19 @@
 
     def test_constructor_options(self):
         client = mock.sentinel.client
-        items_field = 'items'
-        request_field = 'request'
-        response_field = 'response'
+        items_field = "items"
+        request_field = "request"
+        response_field = "response"
         iterator = page_iterator.GRPCIterator(
-            client, mock.sentinel.method, mock.sentinel.request, items_field,
+            client,
+            mock.sentinel.method,
+            mock.sentinel.request,
+            items_field,
             item_to_value=mock.sentinel.item_to_value,
             request_token_field=request_field,
             response_token_field=response_field,
-            max_results=42)
+            max_results=42,
+        )
 
         assert iterator.client is client
         assert iterator.max_results == 42
@@ -454,46 +477,49 @@
         assert iterator._response_token_field == response_field
 
     def test_iterate(self):
-        request = mock.Mock(spec=['page_token'], page_token=None)
-        response1 = mock.Mock(items=['a', 'b'], next_page_token='1')
-        response2 = mock.Mock(items=['c'], next_page_token='2')
-        response3 = mock.Mock(items=['d'], next_page_token='')
+        request = mock.Mock(spec=["page_token"], page_token=None)
+        response1 = mock.Mock(items=["a", "b"], next_page_token="1")
+        response2 = mock.Mock(items=["c"], next_page_token="2")
+        response3 = mock.Mock(items=["d"], next_page_token="")
         method = mock.Mock(side_effect=[response1, response2, response3])
         iterator = page_iterator.GRPCIterator(
-            mock.sentinel.client, method, request, 'items')
+            mock.sentinel.client, method, request, "items"
+        )
 
         assert iterator.num_results == 0
 
         items = list(iterator)
-        assert items == ['a', 'b', 'c', 'd']
+        assert items == ["a", "b", "c", "d"]
 
         method.assert_called_with(request)
         assert method.call_count == 3
-        assert request.page_token == '2'
+        assert request.page_token == "2"
 
     def test_iterate_with_max_results(self):
-        request = mock.Mock(spec=['page_token'], page_token=None)
-        response1 = mock.Mock(items=['a', 'b'], next_page_token='1')
-        response2 = mock.Mock(items=['c'], next_page_token='2')
-        response3 = mock.Mock(items=['d'], next_page_token='')
+        request = mock.Mock(spec=["page_token"], page_token=None)
+        response1 = mock.Mock(items=["a", "b"], next_page_token="1")
+        response2 = mock.Mock(items=["c"], next_page_token="2")
+        response3 = mock.Mock(items=["d"], next_page_token="")
         method = mock.Mock(side_effect=[response1, response2, response3])
         iterator = page_iterator.GRPCIterator(
-            mock.sentinel.client, method, request, 'items', max_results=3)
+            mock.sentinel.client, method, request, "items", max_results=3
+        )
 
         assert iterator.num_results == 0
 
         items = list(iterator)
 
-        assert items == ['a', 'b', 'c']
+        assert items == ["a", "b", "c"]
         assert iterator.num_results == 3
 
         method.assert_called_with(request)
         assert method.call_count == 2
-        assert request.page_token is '1'
+        assert request.page_token == "1"
 
 
 class GAXPageIterator(object):
     """Fake object that matches gax.PageIterator"""
+
     def __init__(self, pages, page_token=None):
         self._pages = iter(pages)
         self.page_token = page_token
@@ -505,15 +531,15 @@
 
 
 class TestGAXIterator(object):
-
     def test_constructor(self):
         client = mock.sentinel.client
-        token = 'zzzyy78kl'
+        token = "zzzyy78kl"
         page_iter = GAXPageIterator((), page_token=token)
         item_to_value = page_iterator._item_to_value_identity
         max_results = 1337
         iterator = page_iterator._GAXIterator(
-            client, page_iter, item_to_value, max_results=max_results)
+            client, page_iter, item_to_value, max_results=max_results
+        )
 
         assert not iterator._started
         assert iterator.client is client
@@ -527,12 +553,11 @@
 
     def test__next_page(self):
         page_items = (29, 31)
-        page_token = '2sde98ds2s0hh'
+        page_token = "2sde98ds2s0hh"
         page_iter = GAXPageIterator([page_items], page_token=page_token)
         iterator = page_iterator._GAXIterator(
-            mock.sentinel.client,
-            page_iter,
-            page_iterator._item_to_value_identity)
+            mock.sentinel.client, page_iter, page_iterator._item_to_value_identity
+        )
 
         page = iterator._next_page()
 
diff --git a/tests/unit/test_path_template.py b/tests/unit/test_path_template.py
index 267a048..4c8a7c5 100644
--- a/tests/unit/test_path_template.py
+++ b/tests/unit/test_path_template.py
@@ -20,71 +20,96 @@
 from google.api_core import path_template
 
 
-@pytest.mark.parametrize('tmpl, args, kwargs, expected_result', [
-    # Basic positional params
-    ['/v1/*', ['a'], {}, '/v1/a'],
-    ['/v1/**', ['a/b'], {}, '/v1/a/b'],
-    ['/v1/*/*', ['a', 'b'], {}, '/v1/a/b'],
-    ['/v1/*/*/**', ['a', 'b', 'c/d'], {}, '/v1/a/b/c/d'],
-    # Basic named params
-    ['/v1/{name}', [], {'name': 'parent'}, '/v1/parent'],
-    ['/v1/{name=**}', [], {'name': 'parent/child'}, '/v1/parent/child'],
-    # Named params with a sub-template
-    ['/v1/{name=parent/*}', [], {'name': 'parent/child'}, '/v1/parent/child'],
-    ['/v1/{name=parent/**}', [], {'name': 'parent/child/object'},
-     '/v1/parent/child/object'],
-    # Combining positional and named params
-    ['/v1/*/{name}', ['a'], {'name': 'parent'}, '/v1/a/parent'],
-    ['/v1/{name}/*', ['a'], {'name': 'parent'}, '/v1/parent/a'],
-    ['/v1/{parent}/*/{child}/*', ['a', 'b'],
-     {'parent': 'thor', 'child': 'thorson'}, '/v1/thor/a/thorson/b'],
-    ['/v1/{name}/**', ['a/b'], {'name': 'parent'}, '/v1/parent/a/b'],
-    # Combining positional and named params with sub-templates.
-    ['/v1/{name=parent/*}/*', ['a'], {'name': 'parent/child'},
-     '/v1/parent/child/a'],
-    ['/v1/*/{name=parent/**}', ['a'], {'name': 'parent/child/object'},
-     '/v1/a/parent/child/object'],
-])
+@pytest.mark.parametrize(
+    "tmpl, args, kwargs, expected_result",
+    [
+        # Basic positional params
+        ["/v1/*", ["a"], {}, "/v1/a"],
+        ["/v1/**", ["a/b"], {}, "/v1/a/b"],
+        ["/v1/*/*", ["a", "b"], {}, "/v1/a/b"],
+        ["/v1/*/*/**", ["a", "b", "c/d"], {}, "/v1/a/b/c/d"],
+        # Basic named params
+        ["/v1/{name}", [], {"name": "parent"}, "/v1/parent"],
+        ["/v1/{name=**}", [], {"name": "parent/child"}, "/v1/parent/child"],
+        # Named params with a sub-template
+        ["/v1/{name=parent/*}", [], {"name": "parent/child"}, "/v1/parent/child"],
+        [
+            "/v1/{name=parent/**}",
+            [],
+            {"name": "parent/child/object"},
+            "/v1/parent/child/object",
+        ],
+        # Combining positional and named params
+        ["/v1/*/{name}", ["a"], {"name": "parent"}, "/v1/a/parent"],
+        ["/v1/{name}/*", ["a"], {"name": "parent"}, "/v1/parent/a"],
+        [
+            "/v1/{parent}/*/{child}/*",
+            ["a", "b"],
+            {"parent": "thor", "child": "thorson"},
+            "/v1/thor/a/thorson/b",
+        ],
+        ["/v1/{name}/**", ["a/b"], {"name": "parent"}, "/v1/parent/a/b"],
+        # Combining positional and named params with sub-templates.
+        [
+            "/v1/{name=parent/*}/*",
+            ["a"],
+            {"name": "parent/child"},
+            "/v1/parent/child/a",
+        ],
+        [
+            "/v1/*/{name=parent/**}",
+            ["a"],
+            {"name": "parent/child/object"},
+            "/v1/a/parent/child/object",
+        ],
+    ],
+)
 def test_expand_success(tmpl, args, kwargs, expected_result):
     result = path_template.expand(tmpl, *args, **kwargs)
     assert result == expected_result
     assert path_template.validate(tmpl, result)
 
 
-@pytest.mark.parametrize('tmpl, args, kwargs, exc_match', [
-    # Missing positional arg.
-    ['v1/*', [], {}, 'Positional'],
-    # Missing named arg.
-    ['v1/{name}', [], {}, 'Named'],
-])
+@pytest.mark.parametrize(
+    "tmpl, args, kwargs, exc_match",
+    [
+        # Missing positional arg.
+        ["v1/*", [], {}, "Positional"],
+        # Missing named arg.
+        ["v1/{name}", [], {}, "Named"],
+    ],
+)
 def test_expanded_failure(tmpl, args, kwargs, exc_match):
     with pytest.raises(ValueError, match=exc_match):
         path_template.expand(tmpl, *args, **kwargs)
 
 
-@pytest.mark.parametrize('tmpl, path', [
-    # Single segment template, but multi segment value
-    ['v1/*', 'v1/a/b'],
-    ['v1/*/*', 'v1/a/b/c'],
-    # Single segement named template, but multi segment value
-    ['v1/{name}', 'v1/a/b'],
-    ['v1/{name}/{value}', 'v1/a/b/c'],
-    # Named value with a sub-template but invalid value
-    ['v1/{name=parent/*}', 'v1/grandparent/child'],
-])
+@pytest.mark.parametrize(
+    "tmpl, path",
+    [
+        # Single segment template, but multi segment value
+        ["v1/*", "v1/a/b"],
+        ["v1/*/*", "v1/a/b/c"],
+        # Single segment named template, but multi segment value
+        ["v1/{name}", "v1/a/b"],
+        ["v1/{name}/{value}", "v1/a/b/c"],
+        # Named value with a sub-template but invalid value
+        ["v1/{name=parent/*}", "v1/grandparent/child"],
+    ],
+)
 def test_validate_failure(tmpl, path):
     assert not path_template.validate(tmpl, path)
 
 
 def test__expand_variable_match_unexpected():
-    match = mock.Mock(spec=['group'])
+    match = mock.Mock(spec=["group"])
     match.group.return_value = None
-    with pytest.raises(ValueError, match='Unknown'):
+    with pytest.raises(ValueError, match="Unknown"):
         path_template._expand_variable_match([], {}, match)
 
 
 def test__replace_variable_with_pattern():
-    match = mock.Mock(spec=['group'])
+    match = mock.Mock(spec=["group"])
     match.group.return_value = None
-    with pytest.raises(ValueError, match='Unknown'):
+    with pytest.raises(ValueError, match="Unknown"):
         path_template._replace_variable_with_pattern(match)
diff --git a/tests/unit/test_protobuf_helpers.py b/tests/unit/test_protobuf_helpers.py
index 83e078b..ec761a0 100644
--- a/tests/unit/test_protobuf_helpers.py
+++ b/tests/unit/test_protobuf_helpers.py
@@ -48,27 +48,26 @@
 
 def test_check_protobuf_helpers_ok():
     assert protobuf_helpers.check_oneof() is None
-    assert protobuf_helpers.check_oneof(foo='bar') is None
-    assert protobuf_helpers.check_oneof(foo='bar', baz=None) is None
-    assert protobuf_helpers.check_oneof(foo=None, baz='bacon') is None
-    assert (protobuf_helpers.check_oneof(foo='bar', spam=None, eggs=None)
-            is None)
+    assert protobuf_helpers.check_oneof(foo="bar") is None
+    assert protobuf_helpers.check_oneof(foo="bar", baz=None) is None
+    assert protobuf_helpers.check_oneof(foo=None, baz="bacon") is None
+    assert protobuf_helpers.check_oneof(foo="bar", spam=None, eggs=None) is None
 
 
 def test_check_protobuf_helpers_failures():
     with pytest.raises(ValueError):
-        protobuf_helpers.check_oneof(foo='bar', spam='eggs')
+        protobuf_helpers.check_oneof(foo="bar", spam="eggs")
     with pytest.raises(ValueError):
-        protobuf_helpers.check_oneof(foo='bar', baz='bacon', spam='eggs')
+        protobuf_helpers.check_oneof(foo="bar", baz="bacon", spam="eggs")
     with pytest.raises(ValueError):
-        protobuf_helpers.check_oneof(foo='bar', spam=0, eggs=None)
+        protobuf_helpers.check_oneof(foo="bar", spam=0, eggs=None)
 
 
 def test_get_messages():
     answer = protobuf_helpers.get_messages(date_pb2)
 
     # Ensure that Date was exported properly.
-    assert answer['Date'] is date_pb2.Date
+    assert answer["Date"] is date_pb2.Date
 
     # Ensure that no non-Message objects were exported.
     for value in answer.values():
@@ -77,171 +76,170 @@
 
 def test_get_dict_absent():
     with pytest.raises(KeyError):
-        assert protobuf_helpers.get({}, 'foo')
+        assert protobuf_helpers.get({}, "foo")
 
 
 def test_get_dict_present():
-    assert protobuf_helpers.get({'foo': 'bar'}, 'foo') == 'bar'
+    assert protobuf_helpers.get({"foo": "bar"}, "foo") == "bar"
 
 
 def test_get_dict_default():
-    assert protobuf_helpers.get({}, 'foo', default='bar') == 'bar'
+    assert protobuf_helpers.get({}, "foo", default="bar") == "bar"
 
 
 def test_get_dict_nested():
-    assert protobuf_helpers.get({'foo': {'bar': 'baz'}}, 'foo.bar') == 'baz'
+    assert protobuf_helpers.get({"foo": {"bar": "baz"}}, "foo.bar") == "baz"
 
 
 def test_get_dict_nested_default():
-    assert protobuf_helpers.get({}, 'foo.baz', default='bacon') == 'bacon'
-    assert (
-        protobuf_helpers.get({'foo': {}}, 'foo.baz', default='bacon') ==
-        'bacon')
+    assert protobuf_helpers.get({}, "foo.baz", default="bacon") == "bacon"
+    assert protobuf_helpers.get({"foo": {}}, "foo.baz", default="bacon") == "bacon"
 
 
 def test_get_msg_sentinel():
     msg = timestamp_pb2.Timestamp()
     with pytest.raises(KeyError):
-        assert protobuf_helpers.get(msg, 'foo')
+        assert protobuf_helpers.get(msg, "foo")
 
 
 def test_get_msg_present():
     msg = timestamp_pb2.Timestamp(seconds=42)
-    assert protobuf_helpers.get(msg, 'seconds') == 42
+    assert protobuf_helpers.get(msg, "seconds") == 42
 
 
 def test_get_msg_default():
     msg = timestamp_pb2.Timestamp()
-    assert protobuf_helpers.get(msg, 'foo', default='bar') == 'bar'
+    assert protobuf_helpers.get(msg, "foo", default="bar") == "bar"
 
 
 def test_invalid_object():
     with pytest.raises(TypeError):
-        protobuf_helpers.get(object(), 'foo', 'bar')
+        protobuf_helpers.get(object(), "foo", "bar")
 
 
 def test_set_dict():
     mapping = {}
-    protobuf_helpers.set(mapping, 'foo', 'bar')
-    assert mapping == {'foo': 'bar'}
+    protobuf_helpers.set(mapping, "foo", "bar")
+    assert mapping == {"foo": "bar"}
 
 
 def test_set_msg():
     msg = timestamp_pb2.Timestamp()
-    protobuf_helpers.set(msg, 'seconds', 42)
+    protobuf_helpers.set(msg, "seconds", 42)
     assert msg.seconds == 42
 
 
 def test_set_dict_nested():
     mapping = {}
-    protobuf_helpers.set(mapping, 'foo.bar', 'baz')
-    assert mapping == {'foo': {'bar': 'baz'}}
+    protobuf_helpers.set(mapping, "foo.bar", "baz")
+    assert mapping == {"foo": {"bar": "baz"}}
 
 
 def test_set_invalid_object():
     with pytest.raises(TypeError):
-        protobuf_helpers.set(object(), 'foo', 'bar')
+        protobuf_helpers.set(object(), "foo", "bar")
 
 
 def test_set_list():
     list_ops_response = operations_pb2.ListOperationsResponse()
 
-    protobuf_helpers.set(list_ops_response, 'operations', [
-        {'name': 'foo'},
-        operations_pb2.Operation(name='bar'),
-    ])
+    protobuf_helpers.set(
+        list_ops_response,
+        "operations",
+        [{"name": "foo"}, operations_pb2.Operation(name="bar")],
+    )
 
     assert len(list_ops_response.operations) == 2
 
     for operation in list_ops_response.operations:
         assert isinstance(operation, operations_pb2.Operation)
 
-    assert list_ops_response.operations[0].name == 'foo'
-    assert list_ops_response.operations[1].name == 'bar'
+    assert list_ops_response.operations[0].name == "foo"
+    assert list_ops_response.operations[1].name == "bar"
 
 
 def test_set_list_clear_existing():
     list_ops_response = operations_pb2.ListOperationsResponse(
-        operations=[{'name': 'baz'}],
+        operations=[{"name": "baz"}]
     )
 
-    protobuf_helpers.set(list_ops_response, 'operations', [
-        {'name': 'foo'},
-        operations_pb2.Operation(name='bar'),
-    ])
+    protobuf_helpers.set(
+        list_ops_response,
+        "operations",
+        [{"name": "foo"}, operations_pb2.Operation(name="bar")],
+    )
 
     assert len(list_ops_response.operations) == 2
     for operation in list_ops_response.operations:
         assert isinstance(operation, operations_pb2.Operation)
-    assert list_ops_response.operations[0].name == 'foo'
-    assert list_ops_response.operations[1].name == 'bar'
+    assert list_ops_response.operations[0].name == "foo"
+    assert list_ops_response.operations[1].name == "bar"
 
 
 def test_set_msg_with_msg_field():
     rule = http_pb2.HttpRule()
-    pattern = http_pb2.CustomHttpPattern(kind='foo', path='bar')
+    pattern = http_pb2.CustomHttpPattern(kind="foo", path="bar")
 
-    protobuf_helpers.set(rule, 'custom', pattern)
+    protobuf_helpers.set(rule, "custom", pattern)
 
-    assert rule.custom.kind == 'foo'
-    assert rule.custom.path == 'bar'
+    assert rule.custom.kind == "foo"
+    assert rule.custom.path == "bar"
 
 
 def test_set_msg_with_dict_field():
     rule = http_pb2.HttpRule()
-    pattern = {'kind': 'foo', 'path': 'bar'}
+    pattern = {"kind": "foo", "path": "bar"}
 
-    protobuf_helpers.set(rule, 'custom', pattern)
+    protobuf_helpers.set(rule, "custom", pattern)
 
-    assert rule.custom.kind == 'foo'
-    assert rule.custom.path == 'bar'
+    assert rule.custom.kind == "foo"
+    assert rule.custom.path == "bar"
 
 
 def test_set_msg_nested_key():
-    rule = http_pb2.HttpRule(
-        custom=http_pb2.CustomHttpPattern(kind='foo', path='bar'))
+    rule = http_pb2.HttpRule(custom=http_pb2.CustomHttpPattern(kind="foo", path="bar"))
 
-    protobuf_helpers.set(rule, 'custom.kind', 'baz')
+    protobuf_helpers.set(rule, "custom.kind", "baz")
 
-    assert rule.custom.kind == 'baz'
-    assert rule.custom.path == 'bar'
+    assert rule.custom.kind == "baz"
+    assert rule.custom.path == "bar"
 
 
 def test_setdefault_dict_unset():
     mapping = {}
-    protobuf_helpers.setdefault(mapping, 'foo', 'bar')
-    assert mapping == {'foo': 'bar'}
+    protobuf_helpers.setdefault(mapping, "foo", "bar")
+    assert mapping == {"foo": "bar"}
 
 
 def test_setdefault_dict_falsy():
-    mapping = {'foo': None}
-    protobuf_helpers.setdefault(mapping, 'foo', 'bar')
-    assert mapping == {'foo': 'bar'}
+    mapping = {"foo": None}
+    protobuf_helpers.setdefault(mapping, "foo", "bar")
+    assert mapping == {"foo": "bar"}
 
 
 def test_setdefault_dict_truthy():
-    mapping = {'foo': 'bar'}
-    protobuf_helpers.setdefault(mapping, 'foo', 'baz')
-    assert mapping == {'foo': 'bar'}
+    mapping = {"foo": "bar"}
+    protobuf_helpers.setdefault(mapping, "foo", "baz")
+    assert mapping == {"foo": "bar"}
 
 
 def test_setdefault_pb2_falsy():
     operation = operations_pb2.Operation()
-    protobuf_helpers.setdefault(operation, 'name', 'foo')
-    assert operation.name == 'foo'
+    protobuf_helpers.setdefault(operation, "name", "foo")
+    assert operation.name == "foo"
 
 
 def test_setdefault_pb2_truthy():
-    operation = operations_pb2.Operation(name='bar')
-    protobuf_helpers.setdefault(operation, 'name', 'foo')
-    assert operation.name == 'bar'
+    operation = operations_pb2.Operation(name="bar")
+    protobuf_helpers.setdefault(operation, "name", "foo")
+    assert operation.name == "bar"
 
 
 def test_field_mask_invalid_args():
     with pytest.raises(ValueError):
-        protobuf_helpers.field_mask('foo', any_pb2.Any())
+        protobuf_helpers.field_mask("foo", any_pb2.Any())
     with pytest.raises(ValueError):
-        protobuf_helpers.field_mask(any_pb2.Any(), 'bar')
+        protobuf_helpers.field_mask(any_pb2.Any(), "bar")
     with pytest.raises(ValueError):
         protobuf_helpers.field_mask(any_pb2.Any(), operations_pb2.Operation())
 
@@ -257,16 +255,12 @@
     modified = color_pb2.Color(alpha=wrappers_pb2.FloatValue(value=1.0))
     assert protobuf_helpers.field_mask(original, modified).paths == []
 
-    original = struct_pb2.ListValue(
-            values=[struct_pb2.Value(number_value=1.0)])
-    modified = struct_pb2.ListValue(
-            values=[struct_pb2.Value(number_value=1.0)])
+    original = struct_pb2.ListValue(values=[struct_pb2.Value(number_value=1.0)])
+    modified = struct_pb2.ListValue(values=[struct_pb2.Value(number_value=1.0)])
     assert protobuf_helpers.field_mask(original, modified).paths == []
 
-    original = struct_pb2.Struct(
-            fields={'bar': struct_pb2.Value(number_value=1.0)})
-    modified = struct_pb2.Struct(
-            fields={'bar': struct_pb2.Value(number_value=1.0)})
+    original = struct_pb2.Struct(fields={"bar": struct_pb2.Value(number_value=1.0)})
+    modified = struct_pb2.Struct(fields={"bar": struct_pb2.Value(number_value=1.0)})
     assert protobuf_helpers.field_mask(original, modified).paths == []
 
 
@@ -309,151 +303,149 @@
 
 
 def test_field_mask_singular_field_diffs():
-    original = type_pb2.Type(name='name')
+    original = type_pb2.Type(name="name")
     modified = type_pb2.Type()
-    assert (protobuf_helpers.field_mask(original, modified).paths ==
-            ['name'])
+    assert protobuf_helpers.field_mask(original, modified).paths == ["name"]
 
-    original = type_pb2.Type(name='name')
+    original = type_pb2.Type(name="name")
     modified = type_pb2.Type()
-    assert (protobuf_helpers.field_mask(original, modified).paths ==
-            ['name'])
+    assert protobuf_helpers.field_mask(original, modified).paths == ["name"]
 
     original = None
-    modified = type_pb2.Type(name='name')
-    assert (protobuf_helpers.field_mask(original, modified).paths ==
-            ['name'])
+    modified = type_pb2.Type(name="name")
+    assert protobuf_helpers.field_mask(original, modified).paths == ["name"]
 
-    original = type_pb2.Type(name='name')
+    original = type_pb2.Type(name="name")
     modified = None
-    assert (protobuf_helpers.field_mask(original, modified).paths ==
-            ['name'])
+    assert protobuf_helpers.field_mask(original, modified).paths == ["name"]
 
 
 def test_field_mask_message_diffs():
     original = type_pb2.Type()
-    modified = type_pb2.Type(source_context=source_context_pb2.SourceContext(
-                            file_name='name'))
-    assert (protobuf_helpers.field_mask(original, modified).paths ==
-            ['source_context.file_name'])
+    modified = type_pb2.Type(
+        source_context=source_context_pb2.SourceContext(file_name="name")
+    )
+    assert protobuf_helpers.field_mask(original, modified).paths == [
+        "source_context.file_name"
+    ]
 
-    original = type_pb2.Type(source_context=source_context_pb2.SourceContext(
-                             file_name='name'))
+    original = type_pb2.Type(
+        source_context=source_context_pb2.SourceContext(file_name="name")
+    )
     modified = type_pb2.Type()
-    assert (protobuf_helpers.field_mask(original, modified).paths ==
-            ['source_context'])
+    assert protobuf_helpers.field_mask(original, modified).paths == ["source_context"]
 
-    original = type_pb2.Type(source_context=source_context_pb2.SourceContext(
-                             file_name='name'))
-    modified = type_pb2.Type(source_context=source_context_pb2.SourceContext(
-                             file_name='other_name'))
-    assert (protobuf_helpers.field_mask(original, modified).paths ==
-            ['source_context.file_name'])
+    original = type_pb2.Type(
+        source_context=source_context_pb2.SourceContext(file_name="name")
+    )
+    modified = type_pb2.Type(
+        source_context=source_context_pb2.SourceContext(file_name="other_name")
+    )
+    assert protobuf_helpers.field_mask(original, modified).paths == [
+        "source_context.file_name"
+    ]
 
     original = None
-    modified = type_pb2.Type(source_context=source_context_pb2.SourceContext(
-                             file_name='name'))
-    assert (protobuf_helpers.field_mask(original, modified).paths ==
-            ['source_context.file_name'])
+    modified = type_pb2.Type(
+        source_context=source_context_pb2.SourceContext(file_name="name")
+    )
+    assert protobuf_helpers.field_mask(original, modified).paths == [
+        "source_context.file_name"
+    ]
 
-    original = type_pb2.Type(source_context=source_context_pb2.SourceContext(
-                             file_name='name'))
+    original = type_pb2.Type(
+        source_context=source_context_pb2.SourceContext(file_name="name")
+    )
     modified = None
-    assert (protobuf_helpers.field_mask(original, modified).paths ==
-            ['source_context'])
+    assert protobuf_helpers.field_mask(original, modified).paths == ["source_context"]
 
 
 def test_field_mask_wrapper_type_diffs():
     original = color_pb2.Color()
     modified = color_pb2.Color(alpha=wrappers_pb2.FloatValue(value=1.0))
-    assert protobuf_helpers.field_mask(original, modified).paths == ['alpha']
+    assert protobuf_helpers.field_mask(original, modified).paths == ["alpha"]
 
     original = color_pb2.Color(alpha=wrappers_pb2.FloatValue(value=1.0))
     modified = color_pb2.Color()
-    assert (protobuf_helpers.field_mask(original, modified).paths ==
-            ['alpha'])
+    assert protobuf_helpers.field_mask(original, modified).paths == ["alpha"]
 
     original = color_pb2.Color(alpha=wrappers_pb2.FloatValue(value=1.0))
     modified = color_pb2.Color(alpha=wrappers_pb2.FloatValue(value=2.0))
-    assert (protobuf_helpers.field_mask(original, modified).paths ==
-            ['alpha'])
+    assert protobuf_helpers.field_mask(original, modified).paths == ["alpha"]
 
     original = None
     modified = color_pb2.Color(alpha=wrappers_pb2.FloatValue(value=2.0))
-    assert (protobuf_helpers.field_mask(original, modified).paths ==
-            ['alpha'])
+    assert protobuf_helpers.field_mask(original, modified).paths == ["alpha"]
 
     original = color_pb2.Color(alpha=wrappers_pb2.FloatValue(value=1.0))
     modified = None
-    assert (protobuf_helpers.field_mask(original, modified).paths ==
-            ['alpha'])
+    assert protobuf_helpers.field_mask(original, modified).paths == ["alpha"]
 
 
 def test_field_mask_repeated_diffs():
     original = struct_pb2.ListValue()
-    modified = struct_pb2.ListValue(values=[struct_pb2.Value(number_value=1.0),
-                                    struct_pb2.Value(number_value=2.0)])
-    assert protobuf_helpers.field_mask(original, modified).paths == ['values']
+    modified = struct_pb2.ListValue(
+        values=[struct_pb2.Value(number_value=1.0), struct_pb2.Value(number_value=2.0)]
+    )
+    assert protobuf_helpers.field_mask(original, modified).paths == ["values"]
 
-    original = struct_pb2.ListValue(values=[struct_pb2.Value(number_value=1.0),
-                                    struct_pb2.Value(number_value=2.0)])
+    original = struct_pb2.ListValue(
+        values=[struct_pb2.Value(number_value=1.0), struct_pb2.Value(number_value=2.0)]
+    )
     modified = struct_pb2.ListValue()
-    assert protobuf_helpers.field_mask(original, modified).paths == ['values']
+    assert protobuf_helpers.field_mask(original, modified).paths == ["values"]
 
     original = None
-    modified = struct_pb2.ListValue(values=[struct_pb2.Value(number_value=1.0),
-                                    struct_pb2.Value(number_value=2.0)])
-    assert protobuf_helpers.field_mask(original, modified).paths == ['values']
+    modified = struct_pb2.ListValue(
+        values=[struct_pb2.Value(number_value=1.0), struct_pb2.Value(number_value=2.0)]
+    )
+    assert protobuf_helpers.field_mask(original, modified).paths == ["values"]
 
-    original = struct_pb2.ListValue(values=[struct_pb2.Value(number_value=1.0),
-                                    struct_pb2.Value(number_value=2.0)])
+    original = struct_pb2.ListValue(
+        values=[struct_pb2.Value(number_value=1.0), struct_pb2.Value(number_value=2.0)]
+    )
     modified = None
-    assert protobuf_helpers.field_mask(original, modified).paths == ['values']
+    assert protobuf_helpers.field_mask(original, modified).paths == ["values"]
 
-    original = struct_pb2.ListValue(values=[struct_pb2.Value(number_value=1.0),
-                                    struct_pb2.Value(number_value=2.0)])
-    modified = struct_pb2.ListValue(values=[struct_pb2.Value(number_value=2.0),
-                                    struct_pb2.Value(number_value=1.0)])
-    assert protobuf_helpers.field_mask(original, modified).paths == ['values']
+    original = struct_pb2.ListValue(
+        values=[struct_pb2.Value(number_value=1.0), struct_pb2.Value(number_value=2.0)]
+    )
+    modified = struct_pb2.ListValue(
+        values=[struct_pb2.Value(number_value=2.0), struct_pb2.Value(number_value=1.0)]
+    )
+    assert protobuf_helpers.field_mask(original, modified).paths == ["values"]
 
 
 def test_field_mask_map_diffs():
     original = struct_pb2.Struct()
-    modified = struct_pb2.Struct(
-            fields={'foo': struct_pb2.Value(number_value=1.0)})
-    assert protobuf_helpers.field_mask(original, modified).paths == ['fields']
+    modified = struct_pb2.Struct(fields={"foo": struct_pb2.Value(number_value=1.0)})
+    assert protobuf_helpers.field_mask(original, modified).paths == ["fields"]
 
-    original = struct_pb2.Struct(
-            fields={'foo': struct_pb2.Value(number_value=1.0)})
+    original = struct_pb2.Struct(fields={"foo": struct_pb2.Value(number_value=1.0)})
     modified = struct_pb2.Struct()
-    assert protobuf_helpers.field_mask(original, modified).paths == ['fields']
+    assert protobuf_helpers.field_mask(original, modified).paths == ["fields"]
 
     original = None
-    modified = struct_pb2.Struct(
-            fields={'foo': struct_pb2.Value(number_value=1.0)})
-    assert protobuf_helpers.field_mask(original, modified).paths == ['fields']
+    modified = struct_pb2.Struct(fields={"foo": struct_pb2.Value(number_value=1.0)})
+    assert protobuf_helpers.field_mask(original, modified).paths == ["fields"]
 
-    original = struct_pb2.Struct(
-            fields={'foo': struct_pb2.Value(number_value=1.0)})
+    original = struct_pb2.Struct(fields={"foo": struct_pb2.Value(number_value=1.0)})
     modified = None
-    assert protobuf_helpers.field_mask(original, modified).paths == ['fields']
+    assert protobuf_helpers.field_mask(original, modified).paths == ["fields"]
 
-    original = struct_pb2.Struct(
-            fields={'foo': struct_pb2.Value(number_value=1.0)})
-    modified = struct_pb2.Struct(
-            fields={'foo': struct_pb2.Value(number_value=2.0)})
-    assert protobuf_helpers.field_mask(original, modified).paths == ['fields']
+    original = struct_pb2.Struct(fields={"foo": struct_pb2.Value(number_value=1.0)})
+    modified = struct_pb2.Struct(fields={"foo": struct_pb2.Value(number_value=2.0)})
+    assert protobuf_helpers.field_mask(original, modified).paths == ["fields"]
 
-    original = struct_pb2.Struct(
-            fields={'foo': struct_pb2.Value(number_value=1.0)})
-    modified = struct_pb2.Struct(
-            fields={'bar': struct_pb2.Value(number_value=1.0)})
-    assert protobuf_helpers.field_mask(original, modified).paths == ['fields']
+    original = struct_pb2.Struct(fields={"foo": struct_pb2.Value(number_value=1.0)})
+    modified = struct_pb2.Struct(fields={"bar": struct_pb2.Value(number_value=1.0)})
+    assert protobuf_helpers.field_mask(original, modified).paths == ["fields"]
 
 
 def test_field_mask_different_level_diffs():
     original = color_pb2.Color(alpha=wrappers_pb2.FloatValue(value=1.0))
-    modified = color_pb2.Color(
-            alpha=wrappers_pb2.FloatValue(value=2.0), red=1.0)
-    assert (sorted(protobuf_helpers.field_mask(original, modified).paths) ==
-            ['alpha', 'red'])
+    modified = color_pb2.Color(alpha=wrappers_pb2.FloatValue(value=2.0), red=1.0)
+    assert sorted(protobuf_helpers.field_mask(original, modified).paths) == [
+        "alpha",
+        "red",
+    ]
diff --git a/tests/unit/test_retry.py b/tests/unit/test_retry.py
index e7485ba..013b6ad 100644
--- a/tests/unit/test_retry.py
+++ b/tests/unit/test_retry.py
@@ -39,27 +39,27 @@
 
 
 def test_if_transient_error():
-    assert retry.if_transient_error(exceptions.InternalServerError(''))
-    assert retry.if_transient_error(exceptions.TooManyRequests(''))
-    assert not retry.if_transient_error(exceptions.InvalidArgument(''))
+    assert retry.if_transient_error(exceptions.InternalServerError(""))
+    assert retry.if_transient_error(exceptions.TooManyRequests(""))
+    assert not retry.if_transient_error(exceptions.InvalidArgument(""))
 
 
 # Make uniform return half of its maximum, which will be the calculated
 # sleep time.
-@mock.patch('random.uniform', autospec=True, side_effect=lambda m, n: n/2.0)
+@mock.patch("random.uniform", autospec=True, side_effect=lambda m, n: n / 2.0)
 def test_exponential_sleep_generator_base_2(uniform):
-    gen = retry.exponential_sleep_generator(
-        1, 60, multiplier=2)
+    gen = retry.exponential_sleep_generator(1, 60, multiplier=2)
 
     result = list(itertools.islice(gen, 8))
     assert result == [1, 2, 4, 8, 16, 32, 60, 60]
 
 
-@mock.patch('time.sleep', autospec=True)
+@mock.patch("time.sleep", autospec=True)
 @mock.patch(
-    'google.api_core.datetime_helpers.utcnow',
+    "google.api_core.datetime_helpers.utcnow",
     return_value=datetime.datetime.min,
-    autospec=True)
+    autospec=True,
+)
 def test_retry_target_success(utcnow, sleep):
     predicate = retry.if_exception_type(ValueError)
     call_count = [0]
@@ -77,39 +77,40 @@
     sleep.assert_has_calls([mock.call(0), mock.call(1)])
 
 
-@mock.patch('time.sleep', autospec=True)
+@mock.patch("time.sleep", autospec=True)
 @mock.patch(
-    'google.api_core.datetime_helpers.utcnow',
+    "google.api_core.datetime_helpers.utcnow",
     return_value=datetime.datetime.min,
-    autospec=True)
+    autospec=True,
+)
 def test_retry_target_w_on_error(utcnow, sleep):
     predicate = retry.if_exception_type(ValueError)
-    call_count = {'target': 0}
+    call_count = {"target": 0}
     to_raise = ValueError()
 
     def target():
-        call_count['target'] += 1
-        if call_count['target'] < 3:
+        call_count["target"] += 1
+        if call_count["target"] < 3:
             raise to_raise
         return 42
 
     on_error = mock.Mock()
 
-    result = retry.retry_target(
-        target, predicate, range(10), None, on_error=on_error)
+    result = retry.retry_target(target, predicate, range(10), None, on_error=on_error)
 
     assert result == 42
-    assert call_count['target'] == 3
+    assert call_count["target"] == 3
 
     on_error.assert_has_calls([mock.call(to_raise), mock.call(to_raise)])
     sleep.assert_has_calls([mock.call(0), mock.call(1)])
 
 
-@mock.patch('time.sleep', autospec=True)
+@mock.patch("time.sleep", autospec=True)
 @mock.patch(
-    'google.api_core.datetime_helpers.utcnow',
+    "google.api_core.datetime_helpers.utcnow",
     return_value=datetime.datetime.min,
-    autospec=True)
+    autospec=True,
+)
 def test_retry_target_non_retryable_error(utcnow, sleep):
     predicate = retry.if_exception_type(ValueError)
     exception = TypeError()
@@ -122,12 +123,11 @@
     sleep.assert_not_called()
 
 
-@mock.patch('time.sleep', autospec=True)
-@mock.patch(
-    'google.api_core.datetime_helpers.utcnow', autospec=True)
+@mock.patch("time.sleep", autospec=True)
+@mock.patch("google.api_core.datetime_helpers.utcnow", autospec=True)
 def test_retry_target_deadline_exceeded(utcnow, sleep):
     predicate = retry.if_exception_type(ValueError)
-    exception = ValueError('meep')
+    exception = ValueError("meep")
     target = mock.Mock(side_effect=exception)
     # Setup the timeline so that the first call takes 5 seconds but the second
     # call takes 6, which puts the retry over the deadline.
@@ -135,21 +135,21 @@
         # The first call to utcnow establishes the start of the timeline.
         datetime.datetime.min,
         datetime.datetime.min + datetime.timedelta(seconds=5),
-        datetime.datetime.min + datetime.timedelta(seconds=11)]
+        datetime.datetime.min + datetime.timedelta(seconds=11),
+    ]
 
     with pytest.raises(exceptions.RetryError) as exc_info:
         retry.retry_target(target, predicate, range(10), deadline=10)
 
     assert exc_info.value.cause == exception
-    assert exc_info.match('Deadline of 10.0s exceeded')
-    assert exc_info.match('last exception: meep')
+    assert exc_info.match("Deadline of 10.0s exceeded")
+    assert exc_info.match("last exception: meep")
     assert target.call_count == 2
 
 
 def test_retry_target_bad_sleep_generator():
-    with pytest.raises(ValueError, match='Sleep generator'):
-        retry.retry_target(
-            mock.sentinel.target, mock.sentinel.predicate, [], None)
+    with pytest.raises(ValueError, match="Sleep generator"):
+        retry.retry_target(mock.sentinel.target, mock.sentinel.predicate, [], None)
 
 
 class TestRetry(object):
@@ -197,8 +197,7 @@
 
     def test_with_delay(self):
         retry_ = retry.Retry()
-        new_retry = retry_.with_delay(
-            initial=1, maximum=2, multiplier=3)
+        new_retry = retry_.with_delay(initial=1, maximum=2, multiplier=3)
         assert retry_ is not new_retry
         assert new_retry._initial == 1
         assert new_retry._maximum == 2
@@ -206,50 +205,50 @@
 
     def test___str__(self):
         retry_ = retry.Retry()
-        assert re.match((
-            r'<Retry predicate=<function.*?if_exception_type.*?>, '
-            r'initial=1.0, maximum=60.0, multiplier=2.0, deadline=120.0>'),
-            str(retry_))
+        assert re.match(
+            (
+                r"<Retry predicate=<function.*?if_exception_type.*?>, "
+                r"initial=1.0, maximum=60.0, multiplier=2.0, deadline=120.0>"
+            ),
+            str(retry_),
+        )
 
-    @mock.patch('time.sleep', autospec=True)
+    @mock.patch("time.sleep", autospec=True)
     def test___call___and_execute_success(self, sleep):
         retry_ = retry.Retry()
-        target = mock.Mock(spec=['__call__'], return_value=42)
+        target = mock.Mock(spec=["__call__"], return_value=42)
         # __name__ is needed by functools.partial.
-        target.__name__ = 'target'
+        target.__name__ = "target"
 
         decorated = retry_(target)
         target.assert_not_called()
 
-        result = decorated('meep')
+        result = decorated("meep")
 
         assert result == 42
-        target.assert_called_once_with('meep')
+        target.assert_called_once_with("meep")
         sleep.assert_not_called()
 
     # Make uniform return half of its maximum, which will be the calculated
     # sleep time.
-    @mock.patch(
-        'random.uniform', autospec=True, side_effect=lambda m, n: n/2.0)
-    @mock.patch('time.sleep', autospec=True)
+    @mock.patch("random.uniform", autospec=True, side_effect=lambda m, n: n / 2.0)
+    @mock.patch("time.sleep", autospec=True)
     def test___call___and_execute_retry(self, sleep, uniform):
 
-        on_error = mock.Mock(spec=['__call__'], side_effect=[None])
-        retry_ = retry.Retry(
-            predicate=retry.if_exception_type(ValueError),
-        )
+        on_error = mock.Mock(spec=["__call__"], side_effect=[None])
+        retry_ = retry.Retry(predicate=retry.if_exception_type(ValueError))
 
-        target = mock.Mock(spec=['__call__'], side_effect=[ValueError(), 42])
+        target = mock.Mock(spec=["__call__"], side_effect=[ValueError(), 42])
         # __name__ is needed by functools.partial.
-        target.__name__ = 'target'
+        target.__name__ = "target"
 
         decorated = retry_(target, on_error=on_error)
         target.assert_not_called()
 
-        result = decorated('meep')
+        result = decorated("meep")
 
         assert result == 42
         assert target.call_count == 2
-        target.assert_has_calls([mock.call('meep'), mock.call('meep')])
+        target.assert_has_calls([mock.call("meep"), mock.call("meep")])
         sleep.assert_called_once_with(retry_._initial)
         assert on_error.call_count == 1
diff --git a/tests/unit/test_timeout.py b/tests/unit/test_timeout.py
index 5499e87..30d624e 100644
--- a/tests/unit/test_timeout.py
+++ b/tests/unit/test_timeout.py
@@ -21,22 +21,20 @@
 
 
 def test__exponential_timeout_generator_base_2():
-    gen = timeout._exponential_timeout_generator(
-        1.0, 60.0, 2.0, deadline=None)
+    gen = timeout._exponential_timeout_generator(1.0, 60.0, 2.0, deadline=None)
 
     result = list(itertools.islice(gen, 8))
     assert result == [1, 2, 4, 8, 16, 32, 60, 60]
 
 
-@mock.patch('google.api_core.datetime_helpers.utcnow', autospec=True)
+@mock.patch("google.api_core.datetime_helpers.utcnow", autospec=True)
 def test__exponential_timeout_generator_base_deadline(utcnow):
     # Make each successive call to utcnow() advance one second.
     utcnow.side_effect = [
-        datetime.datetime.min + datetime.timedelta(seconds=n)
-        for n in range(15)]
+        datetime.datetime.min + datetime.timedelta(seconds=n) for n in range(15)
+    ]
 
-    gen = timeout._exponential_timeout_generator(
-        1.0, 60.0, 2.0, deadline=30.0)
+    gen = timeout._exponential_timeout_generator(1.0, 60.0, 2.0, deadline=30.0)
 
     result = list(itertools.islice(gen, 14))
     # Should grow until the cumulative time is > 30s, then start decreasing as
@@ -45,7 +43,6 @@
 
 
 class TestConstantTimeout(object):
-
     def test_constructor(self):
         timeout_ = timeout.ConstantTimeout()
         assert timeout_._timeout is None
@@ -56,10 +53,10 @@
 
     def test___str__(self):
         timeout_ = timeout.ConstantTimeout(1)
-        assert str(timeout_) == '<ConstantTimeout timeout=1.0>'
+        assert str(timeout_) == "<ConstantTimeout timeout=1.0>"
 
     def test_apply(self):
-        target = mock.Mock(spec=['__call__', '__name__'], __name__='target')
+        target = mock.Mock(spec=["__call__", "__name__"], __name__="target")
         timeout_ = timeout.ConstantTimeout(42.0)
         wrapped = timeout_(target)
 
@@ -68,17 +65,16 @@
         target.assert_called_once_with(timeout=42.0)
 
     def test_apply_passthrough(self):
-        target = mock.Mock(spec=['__call__', '__name__'], __name__='target')
+        target = mock.Mock(spec=["__call__", "__name__"], __name__="target")
         timeout_ = timeout.ConstantTimeout(42.0)
         wrapped = timeout_(target)
 
-        wrapped(1, 2, meep='moop')
+        wrapped(1, 2, meep="moop")
 
-        target.assert_called_once_with(1, 2, meep='moop', timeout=42.0)
+        target.assert_called_once_with(1, 2, meep="moop", timeout=42.0)
 
 
 class TestExponentialTimeout(object):
-
     def test_constructor(self):
         timeout_ = timeout.ExponentialTimeout()
         assert timeout_._initial == timeout._DEFAULT_INITIAL_TIMEOUT
@@ -105,11 +101,12 @@
     def test___str__(self):
         timeout_ = timeout.ExponentialTimeout(1, 2, 3, 4)
         assert str(timeout_) == (
-            '<ExponentialTimeout initial=1.0, maximum=2.0, multiplier=3.0, '
-            'deadline=4.0>')
+            "<ExponentialTimeout initial=1.0, maximum=2.0, multiplier=3.0, "
+            "deadline=4.0>"
+        )
 
     def test_apply(self):
-        target = mock.Mock(spec=['__call__', '__name__'], __name__='target')
+        target = mock.Mock(spec=["__call__", "__name__"], __name__="target")
         timeout_ = timeout.ExponentialTimeout(1, 10, 2)
         wrapped = timeout_(target)
 
@@ -123,10 +120,10 @@
         target.assert_called_with(timeout=4)
 
     def test_apply_passthrough(self):
-        target = mock.Mock(spec=['__call__', '__name__'], __name__='target')
+        target = mock.Mock(spec=["__call__", "__name__"], __name__="target")
         timeout_ = timeout.ExponentialTimeout(42.0, 100, 2)
         wrapped = timeout_(target)
 
-        wrapped(1, 2, meep='moop')
+        wrapped(1, 2, meep="moop")
 
-        target.assert_called_once_with(1, 2, meep='moop', timeout=42.0)
+        target.assert_called_once_with(1, 2, meep="moop", timeout=42.0)