Snap for 5907899 from 0d1456aa05a2889a7fa6711ec0f0ba9e13be7278 to r-keystone-qcom-release

Change-Id: I57e25af78fe49c42dc755faaf53a6a8afc6971b9
diff --git a/.travis.yml b/.travis.yml
index 6d9ac9d..627bf3d 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -1,25 +1,30 @@
 language: python
 sudo: false
-env:
-  - TOX_ENV=py26
-  - TOX_ENV=py27
-  - TOX_ENV=py27oldoauth2client
-  - TOX_ENV=py34
-  - TOX_ENV=py35
-  - TOX_ENV=pypy
-  - TOX_ENV=lint
+matrix:
+  include:
+  - python: "2.7"
+    env: TOX_ENV=lint
+  - python: "2.7"
+    env: TOX_ENV=py27-oauth2client1
+  - python: "2.7"
+    env: TOX_ENV=py27-oauth2client2
+  - python: "2.7"
+    env: TOX_ENV=py27-oauth2client3
+  - python: "2.7"
+    env: TOX_ENV=py27-oauth2client4
+  - python: "3.4"
+    env: TOX_ENV=py34-oauth2client4
+  - python: "3.5"
+    env: TOX_ENV=py35-oauth2client1
+  - python: "3.5"
+    env: TOX_ENV=py35-oauth2client2
+  - python: "3.5"
+    env: TOX_ENV=py35-oauth2client3
+  - python: "3.5"
+    env: TOX_ENV=py35-oauth2client4
 install:
   - pip install tox
-  - pip install . --allow-external argparse
+  - pip install . argparse
 script: tox -e $TOX_ENV
 after_success:
-  - if [[ "${TOX_ENV}" == "py27" ]]; then tox -e coveralls; fi
-
-# Tweak for adding python3.5; see
-# https://github.com/travis-ci/travis-ci/issues/4794
-addons:
-  apt:
-    sources:
-      - deadsnakes
-    packages:
-      - python3.5
+  - if [[ "${TOX_ENV}" == "py27-oauth2client4" ]]; then tox -e coveralls; fi
diff --git a/METADATA b/METADATA
index 97042dd..9c9b51c 100644
--- a/METADATA
+++ b/METADATA
@@ -1,8 +1,5 @@
 name: "apitools"
-description:
-    "google-apitools is a collection of utilities to make it easier to build "
-    "client-side tools, especially those that talk to Google APIs."
-
+description: "google-apitools is a collection of utilities to make it easier to build client-side tools, especially those that talk to Google APIs."
 third_party {
   url {
     type: HOMEPAGE
@@ -12,6 +9,10 @@
     type: GIT
     value: "https://github.com/google/apitools"
   }
-  version: "0.5.11"
-  last_upgrade_date { year: 2018 month: 6 day: 5 }
+  version: "v0.5.30"
+  last_upgrade_date {
+    year: 2019
+    month: 6
+    day: 26
+  }
 }
diff --git a/TEST_MAPPING b/TEST_MAPPING
new file mode 100644
index 0000000..61a80b2
--- /dev/null
+++ b/TEST_MAPPING
@@ -0,0 +1,8 @@
+{
+  "presubmit" : [
+    {
+      "name" : "acloud_test",
+      "host" : true
+    }
+  ]
+}
diff --git a/apitools/base/protorpclite/messages.py b/apitools/base/protorpclite/messages.py
index df59d18..0d564e9 100644
--- a/apitools/base/protorpclite/messages.py
+++ b/apitools/base/protorpclite/messages.py
@@ -1168,12 +1168,6 @@
     __initialized = False  # pylint:disable=invalid-name
     __variant_to_type = {}  # pylint:disable=invalid-name
 
-    # TODO(craigcitro): Remove this alias.
-    #
-    # We add an alias here for backwards compatibility; note that in
-    # python3, this attribute will silently be ignored.
-    __metaclass__ = _FieldMeta
-
     @util.positional(2)
     def __init__(self,
                  number,
@@ -1526,22 +1520,22 @@
         """Validate StringField allowing for str and unicode.
 
         Raises:
-          ValidationError if a str value is not 7-bit ascii.
+          ValidationError if a str value is not UTF-8.
         """
         # If value is str is it considered valid.  Satisfies "required=True".
         if isinstance(value, bytes):
             try:
-                six.text_type(value, 'ascii')
+                six.text_type(value, 'UTF-8')
             except UnicodeDecodeError as err:
                 try:
                     _ = self.name
                 except AttributeError:
                     validation_error = ValidationError(
-                        'Field encountered non-ASCII string %r: %s' % (value,
+                        'Field encountered non-UTF-8 string %r: %s' % (value,
                                                                        err))
                 else:
                     validation_error = ValidationError(
-                        'Field %s encountered non-ASCII string %r: %s' % (
+                        'Field %s encountered non-UTF-8 string %r: %s' % (
                             self.name, value, err))
                     validation_error.field_name = self.name
                 raise validation_error
diff --git a/apitools/base/protorpclite/messages_test.py b/apitools/base/protorpclite/messages_test.py
index 78fe76e..3ad75e4 100644
--- a/apitools/base/protorpclite/messages_test.py
+++ b/apitools/base/protorpclite/messages_test.py
@@ -39,6 +39,11 @@
 # pylint:disable=unused-variable
 # pylint:disable=too-many-lines
 
+try:
+    long        # Python 2
+except NameError:
+    long = int  # Python 3
+
 
 class ModuleInterfaceTest(test_util.ModuleInterfaceTest,
                           test_util.TestCase):
@@ -615,10 +620,10 @@
         self.assertRaisesWithRegexpMatch(
             messages.InvalidDefaultError,
             r"Invalid default value for StringField:.*: "
-            r"Field encountered non-ASCII string .*: "
-            r"'ascii' codec can't decode byte 0x89 in position 0: "
-            r"ordinal not in range",
-            messages.StringField, 1, default=b'\x89')
+            r"Field encountered non-UTF-8 string .*: "
+            r"'utf.?8' codec can't decode byte 0xc3 in position 0: "
+            r"invalid continuation byte",
+            messages.StringField, 1, default=b'\xc3\x28')
 
     def testDefaultFields_InvalidSingle(self):
         """Test default field is correct type (invalid single)."""
@@ -1160,15 +1165,15 @@
         m2.my_field = None
         self.assertEquals(m1, m2)
 
-    def testNonAsciiStr(self):
-        """Test validation fails for non-ascii StringField values."""
+    def testNonUtf8Str(self):
+        """Test validation fails for non-UTF-8 StringField values."""
         class Thing(messages.Message):
             string_field = messages.StringField(2)
 
         thing = Thing()
         self.assertRaisesWithRegexpMatch(
             messages.ValidationError,
-            'Field string_field encountered non-ASCII string',
+            'Field string_field encountered non-UTF-8 string',
             setattr, thing, 'string_field', test_util.BINARY)
 
 
diff --git a/apitools/base/protorpclite/protojson.py b/apitools/base/protorpclite/protojson.py
index 4c87cf4..4f3fdeb 100644
--- a/apitools/base/protorpclite/protojson.py
+++ b/apitools/base/protorpclite/protojson.py
@@ -78,6 +78,8 @@
 
     logging.error('Must use valid json library (json or simplejson)')
     raise first_import_error  # pylint:disable=raising-bad-type
+
+
 json = _load_json_module()
 
 
@@ -123,6 +125,8 @@
             for unknown_key in value.all_unrecognized_fields():
                 unrecognized_field, _ = value.get_unrecognized_field_info(
                     unknown_key)
+                # Unknown fields are not encoded as they should have been
+                # processed before we get to here.
                 result[unknown_key] = unrecognized_field
             return result
 
@@ -202,6 +206,7 @@
           ValueError: If encoded_message is not valid JSON.
           messages.ValidationError if merged message is not initialized.
         """
+        encoded_message = six.ensure_str(encoded_message)
         if not encoded_message.strip():
             return message_type()
 
@@ -281,11 +286,19 @@
                 valid_value = [self.decode_field(field, item)
                                for item in value]
                 setattr(message, field.name, valid_value)
-            else:
-                # This is just for consistency with the old behavior.
-                if value == []:
-                    continue
+                continue
+            # This is just for consistency with the old behavior.
+            if value == []:
+                continue
+            try:
                 setattr(message, field.name, self.decode_field(field, value))
+            except messages.DecodeError:
+                # Save unknown enum values.
+                if not isinstance(field, messages.EnumField):
+                    raise
+                variant = self.__find_variant(value)
+                if variant:
+                    message.set_unrecognized_field(key, value, variant)
 
         return message
 
@@ -358,6 +371,7 @@
             raise TypeError('Expected protocol of type ProtoJson')
         ProtoJson.__default = protocol
 
+
 CONTENT_TYPE = ProtoJson.CONTENT_TYPE
 
 ALTERNATIVE_CONTENT_TYPES = ProtoJson.ALTERNATIVE_CONTENT_TYPES
diff --git a/apitools/base/protorpclite/protojson_test.py b/apitools/base/protorpclite/protojson_test.py
index 4e4702a..7a8f875 100644
--- a/apitools/base/protorpclite/protojson_test.py
+++ b/apitools/base/protorpclite/protojson_test.py
@@ -51,6 +51,10 @@
 
         nested_value = messages.StringField(1)
 
+    class NestedDatetime(messages.Message):
+
+        nested_dt_value = message_types.DateTimeField(1)
+
     a_string = messages.StringField(2)
     an_integer = messages.IntegerField(3)
     a_float = messages.FloatField(4)
@@ -63,6 +67,7 @@
     a_repeated_datetime = message_types.DateTimeField(11, repeated=True)
     a_custom = CustomField(12)
     a_repeated_custom = CustomField(13, repeated=True)
+    a_nested_datetime = messages.MessageField(NestedDatetime, 14)
 
 
 class ModuleInterfaceTest(test_util.ModuleInterfaceTest,
@@ -198,12 +203,16 @@
 
     def testNumericEnumerationNegativeTest(self):
         """Test with an invalid number for the enum value."""
-        self.assertRaisesRegexp(
-            messages.DecodeError,
-            'Invalid enum value "89"',
-            protojson.decode_message,
-            MyMessage,
-            '{"an_enum": 89}')
+        # The message should successfully decode.
+        message = protojson.decode_message(MyMessage,
+                                           '{"an_enum": 89}')
+
+        expected_message = MyMessage()
+
+        self.assertEquals(expected_message, message)
+        # The roundtrip should result in equivalent encoded
+        # message.
+        self.assertEquals('{"an_enum": 89}', protojson.encode_message(message))
 
     def testAlphaEnumeration(self):
         """Test that alpha enum values work."""
@@ -216,21 +225,27 @@
 
     def testAlphaEnumerationNegativeTest(self):
         """The alpha enum value is invalid."""
-        self.assertRaisesRegexp(
-            messages.DecodeError,
-            'Invalid enum value "IAMINVALID"',
-            protojson.decode_message,
-            MyMessage,
-            '{"an_enum": "IAMINVALID"}')
+        # The message should successfully decode.
+        message = protojson.decode_message(MyMessage,
+                                           '{"an_enum": "IAMINVALID"}')
+
+        expected_message = MyMessage()
+
+        self.assertEquals(expected_message, message)
+        # The roundtrip should result in equivalent encoded message.
+        self.assertEquals('{"an_enum": "IAMINVALID"}',
+                          protojson.encode_message(message))
 
     def testEnumerationNegativeTestWithEmptyString(self):
         """The enum value is an empty string."""
-        self.assertRaisesRegexp(
-            messages.DecodeError,
-            'Invalid enum value ""',
-            protojson.decode_message,
-            MyMessage,
-            '{"an_enum": ""}')
+        # The message should successfully decode.
+        message = protojson.decode_message(MyMessage, '{"an_enum": ""}')
+
+        expected_message = MyMessage()
+
+        self.assertEquals(expected_message, message)
+        # The roundtrip should result in equivalent encoded message.
+        self.assertEquals('{"an_enum": ""}', protojson.encode_message(message))
 
     def testNullValues(self):
         """Test that null values overwrite existing values."""
@@ -358,6 +373,16 @@
         self.assertRaises(messages.DecodeError, protojson.decode_message,
                           MyMessage, '{"a_datetime": "invalid"}')
 
+    def testDecodeInvalidMessage(self):
+        encoded = """{
+        "a_nested_datetime": {
+          "nested_dt_value": "invalid"
+          }
+        }
+        """
+        self.assertRaises(messages.DecodeError, protojson.decode_message,
+                          MyMessage, encoded)
+
     def testEncodeDateTime(self):
         for datetime_string, datetime_vals in (
                 ('2012-09-30T15:31:50.262000',
diff --git a/apitools/base/protorpclite/test_util.py b/apitools/base/protorpclite/test_util.py
index a86cfc7..43345fc 100644
--- a/apitools/base/protorpclite/test_util.py
+++ b/apitools/base/protorpclite/test_util.py
@@ -88,7 +88,7 @@
         self.assertEqual(params1, params2)
 
     def assertIterEqual(self, iter1, iter2):
-        """Check that two iterators or iterables are equal independent of order.
+        """Check two iterators or iterables are equal independent of order.
 
         Similar to Python 2.7 assertItemsEqual.  Named differently in order to
         avoid potential conflict.
@@ -579,11 +579,16 @@
         self.assertTrue(isinstance(self.PROTOLIB.CONTENT_TYPE, str))
 
     def testDecodeInvalidEnumType(self):
-        self.assertRaisesWithRegexpMatch(messages.DecodeError,
-                                         'Invalid enum value ',
-                                         self.PROTOLIB.decode_message,
-                                         OptionalMessage,
-                                         self.encoded_invalid_enum)
+        # Since protos need to be able to add new enums, a message should be
+        # successfully decoded even if the enum value is invalid. Encoding the
+        # decoded message should result in equivalence with the original
+        # encoded message containing an invalid enum.
+        decoded = self.PROTOLIB.decode_message(OptionalMessage,
+                                               self.encoded_invalid_enum)
+        message = OptionalMessage()
+        self.assertEqual(message, decoded)
+        encoded = self.PROTOLIB.encode_message(decoded)
+        self.assertEqual(self.encoded_invalid_enum, encoded)
 
     def testDateTimeNoTimeZone(self):
         """Test that DateTimeFields are encoded/decoded correctly."""
diff --git a/apitools/base/protorpclite/util.py b/apitools/base/protorpclite/util.py
index 7a7797d..b0ba240 100644
--- a/apitools/base/protorpclite/util.py
+++ b/apitools/base/protorpclite/util.py
@@ -53,7 +53,7 @@
 
 
 def positional(max_positional_args):
-    """A decorator to declare that only the first N arguments may be positional.
+    """A decorator that declares only the first N arguments may be positional.
 
     This decorator makes it easy to support Python 3 style keyword-only
     parameters. For example, in Python 3 it is possible to write:
diff --git a/apitools/base/py/app2.py b/apitools/base/py/app2.py
deleted file mode 100644
index c0ea9e0..0000000
--- a/apitools/base/py/app2.py
+++ /dev/null
@@ -1,373 +0,0 @@
-#!/usr/bin/env python
-#
-# Copyright 2015 Google Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""Appcommands-compatible command class with extra fixins."""
-from __future__ import absolute_import
-from __future__ import print_function
-
-import cmd
-import inspect
-import pdb
-import shlex
-import sys
-import traceback
-import types
-
-import gflags as flags
-from google.apputils import app
-from google.apputils import appcommands
-import six
-
-
-__all__ = [
-    'NewCmd',
-    'Repl',
-]
-
-flags.DEFINE_boolean(
-    'debug_mode', False,
-    'Show tracebacks on Python exceptions.')
-flags.DEFINE_boolean(
-    'headless', False,
-    'Assume no user is at the controlling console.')
-FLAGS = flags.FLAGS
-
-
-def _SafeMakeAscii(s):
-    if isinstance(s, six.text_type):
-        return s.encode('ascii')
-    elif isinstance(s, str):
-        return s.decode('ascii')
-    return six.text_type(s).encode('ascii', 'backslashreplace')
-
-
-class NewCmd(appcommands.Cmd):
-
-    """Featureful extension of appcommands.Cmd."""
-
-    def __init__(self, name, flag_values):
-        super(NewCmd, self).__init__(name, flag_values)
-        run_with_args = getattr(self, 'RunWithArgs', None)
-        self._new_style = isinstance(run_with_args, types.MethodType)
-        if self._new_style:
-            func = run_with_args.__func__
-
-            argspec = inspect.getargspec(func)
-            if argspec.args and argspec.args[0] == 'self':
-                argspec = argspec._replace(  # pylint: disable=protected-access
-                    args=argspec.args[1:])
-            self._argspec = argspec
-            # TODO(craigcitro): Do we really want to support all this
-            # nonsense?
-            self._star_args = self._argspec.varargs is not None
-            self._star_kwds = self._argspec.keywords is not None
-            self._max_args = len(self._argspec.args or ())
-            self._min_args = self._max_args - len(self._argspec.defaults or ())
-            if self._star_args:
-                self._max_args = sys.maxsize
-
-            self._debug_mode = FLAGS.debug_mode
-            self.surface_in_shell = True
-            self.__doc__ = self.RunWithArgs.__doc__
-
-    def __getattr__(self, name):
-        if name in self._command_flags:
-            return self._command_flags[name].value
-        return super(NewCmd, self).__getattribute__(name)
-
-    def _GetFlag(self, flagname):
-        if flagname in self._command_flags:
-            return self._command_flags[flagname]
-        return None
-
-    def Run(self, argv):
-        """Run this command.
-
-        If self is a new-style command, we set up arguments and call
-        self.RunWithArgs, gracefully handling exceptions. If not, we
-        simply call self.Run(argv).
-
-        Args:
-          argv: List of arguments as strings.
-
-        Returns:
-          0 on success, nonzero on failure.
-        """
-        if not self._new_style:
-            return super(NewCmd, self).Run(argv)
-
-        # TODO(craigcitro): We need to save and restore flags each time so
-        # that we can per-command flags in the REPL.
-        args = argv[1:]
-        fail = None
-        fail_template = '%s positional args, found %d, expected at %s %d'
-        if len(args) < self._min_args:
-            fail = fail_template % ('Not enough', len(args),
-                                    'least', self._min_args)
-        if len(args) > self._max_args:
-            fail = fail_template % ('Too many', len(args),
-                                    'most', self._max_args)
-        if fail:
-            print(fail)
-            if self.usage:
-                print('Usage: %s' % (self.usage,))
-            return 1
-
-        if self._debug_mode:
-            return self.RunDebug(args, {})
-        return self.RunSafely(args, {})
-
-    def RunCmdLoop(self, argv):
-        """Hook for use in cmd.Cmd-based command shells."""
-        try:
-            args = shlex.split(argv)
-        except ValueError as e:
-            raise SyntaxError(self.EncodeForPrinting(e))
-        return self.Run([self._command_name] + args)
-
-    @staticmethod
-    def EncodeForPrinting(s):
-        """Safely encode a string as the encoding for sys.stdout."""
-        encoding = sys.stdout.encoding or 'ascii'
-        return six.text_type(s).encode(encoding, 'backslashreplace')
-
-    def _FormatError(self, e):
-        """Hook for subclasses to modify how error messages are printed."""
-        return _SafeMakeAscii(e)
-
-    def _HandleError(self, e):
-        message = self._FormatError(e)
-        print('Exception raised in %s operation: %s' % (
-            self._command_name, message))
-        return 1
-
-    def _IsDebuggableException(self, e):
-        """Hook for subclasses to skip debugging on certain exceptions."""
-        return not isinstance(e, app.UsageError)
-
-    def RunDebug(self, args, kwds):
-        """Run this command in debug mode."""
-        try:
-            return_value = self.RunWithArgs(*args, **kwds)
-        except BaseException as e:
-            # Don't break into the debugger for expected exceptions.
-            if not self._IsDebuggableException(e):
-                return self._HandleError(e)
-            print()
-            print('****************************************************')
-            print('**   Unexpected Exception raised in execution!    **')
-            if FLAGS.headless:
-                print('**  --headless mode enabled, exiting.             **')
-                print('**  See STDERR for traceback.                     **')
-            else:
-                print('**  --debug_mode enabled, starting pdb.           **')
-            print('****************************************************')
-            print()
-            traceback.print_exc()
-            print()
-            if not FLAGS.headless:
-                pdb.post_mortem()
-            return 1
-        return return_value
-
-    def RunSafely(self, args, kwds):
-        """Run this command, turning exceptions into print statements."""
-        try:
-            return_value = self.RunWithArgs(*args, **kwds)
-        except BaseException as e:
-            return self._HandleError(e)
-        return return_value
-
-
-class CommandLoop(cmd.Cmd):
-
-    """Instance of cmd.Cmd built to work with NewCmd."""
-
-    class TerminateSignal(Exception):
-
-        """Exception type used for signaling loop completion."""
-
-    def __init__(self, commands, prompt):
-        cmd.Cmd.__init__(self)
-        self._commands = {'help': commands['help']}
-        self._special_command_names = ['help', 'repl', 'EOF']
-        for name, command in commands.items():
-            if (name not in self._special_command_names and
-                    isinstance(command, NewCmd) and
-                    command.surface_in_shell):
-                self._commands[name] = command
-                setattr(self, 'do_%s' % (name,), command.RunCmdLoop)
-        self._default_prompt = prompt
-        self._set_prompt()
-        self._last_return_code = 0
-
-    @property
-    def last_return_code(self):
-        return self._last_return_code
-
-    def _set_prompt(self):  # pylint: disable=invalid-name
-        self.prompt = self._default_prompt
-
-    def do_EOF(self, *unused_args):  # pylint: disable=invalid-name
-        """Terminate the running command loop.
-
-        This function raises an exception to avoid the need to do
-        potentially-error-prone string parsing inside onecmd.
-
-        Args:
-          *unused_args: unused.
-
-        Returns:
-          Never returns.
-
-        Raises:
-          CommandLoop.TerminateSignal: always.
-        """
-        raise CommandLoop.TerminateSignal()
-
-    def postloop(self):
-        print('Goodbye.')
-
-    # pylint: disable=arguments-differ
-    def completedefault(self, unused_text, line, unused_begidx, unused_endidx):
-        if not line:
-            return []
-        else:
-            command_name = line.partition(' ')[0].lower()
-            usage = ''
-            if command_name in self._commands:
-                usage = self._commands[command_name].usage
-            if usage:
-                print()
-                print(usage)
-                print('%s%s' % (self.prompt, line), end=' ')
-            return []
-    # pylint: enable=arguments-differ
-
-    def emptyline(self):
-        print('Available commands:', end=' ')
-        print(' '.join(list(self._commands)))
-
-    def precmd(self, line):
-        """Preprocess the shell input."""
-        if line == 'EOF':
-            return line
-        if line.startswith('exit') or line.startswith('quit'):
-            return 'EOF'
-        words = line.strip().split()
-        if len(words) == 1 and words[0] not in ['help', 'ls', 'version']:
-            return 'help %s' % (line.strip(),)
-        return line
-
-    def onecmd(self, line):
-        """Process a single command.
-
-        Runs a single command, and stores the return code in
-        self._last_return_code. Always returns False unless the command
-        was EOF.
-
-        Args:
-          line: (str) Command line to process.
-
-        Returns:
-          A bool signaling whether or not the command loop should terminate.
-        """
-        try:
-            self._last_return_code = cmd.Cmd.onecmd(self, line)
-        except CommandLoop.TerminateSignal:
-            return True
-        except BaseException as e:
-            name = line.split(' ')[0]
-            print('Error running %s:' % name)
-            print(e)
-            self._last_return_code = 1
-        return False
-
-    def get_names(self):
-        names = dir(self)
-        commands = (name for name in self._commands
-                    if name not in self._special_command_names)
-        names.extend('do_%s' % (name,) for name in commands)
-        names.remove('do_EOF')
-        return names
-
-    def do_help(self, arg):
-        """Print the help for command_name (if present) or general help."""
-
-        command_name = arg
-
-        # TODO(craigcitro): Add command-specific flags.
-        def FormatOneCmd(name, command, command_names):
-            """Format one command."""
-            indent_size = appcommands.GetMaxCommandLength() + 3
-            if len(command_names) > 1:
-                indent = ' ' * indent_size
-                command_help = flags.TextWrap(
-                    command.CommandGetHelp('', cmd_names=command_names),
-                    indent=indent,
-                    firstline_indent='')
-                first_help_line, _, rest = command_help.partition('\n')
-                first_line = '%-*s%s' % (indent_size,
-                                         name + ':', first_help_line)
-                return '\n'.join((first_line, rest))
-            default_indent = '  '
-            return '\n' + flags.TextWrap(
-                command.CommandGetHelp('', cmd_names=command_names),
-                indent=default_indent,
-                firstline_indent=default_indent) + '\n'
-
-        if not command_name:
-            print('\nHelp for commands:\n')
-            command_names = list(self._commands)
-            print('\n\n'.join(
-                FormatOneCmd(name, command, command_names)
-                for name, command in self._commands.items()
-                if name not in self._special_command_names))
-            print()
-        elif command_name in self._commands:
-            print(FormatOneCmd(command_name, self._commands[command_name],
-                               command_names=[command_name]))
-        return 0
-
-    def postcmd(self, stop, line):
-        return bool(stop) or line == 'EOF'
-
-
-class Repl(NewCmd):
-
-    """Start an interactive session."""
-    PROMPT = '> '
-
-    def __init__(self, name, fv):
-        super(Repl, self).__init__(name, fv)
-        self.surface_in_shell = False
-        flags.DEFINE_string(
-            'prompt', '',
-            'Prompt to use for interactive shell.',
-            flag_values=fv)
-
-    def RunWithArgs(self):
-        """Start an interactive session."""
-        prompt = FLAGS.prompt or self.PROMPT
-        repl = CommandLoop(appcommands.GetCommandList(), prompt=prompt)
-        print('Welcome! (Type help for more information.)')
-        while True:
-            try:
-                repl.cmdloop()
-                break
-            except KeyboardInterrupt:
-                print()
-        return repl.last_return_code
diff --git a/apitools/base/py/base_api.py b/apitools/base/py/base_api.py
index 98836c9..1d490c3 100644
--- a/apitools/base/py/base_api.py
+++ b/apitools/base/py/base_api.py
@@ -115,6 +115,8 @@
     request_field = messages.StringField(11, default='')
     upload_config = messages.MessageField(ApiUploadInfo, 12)
     supports_download = messages.BooleanField(13, default=False)
+
+
 REQUEST_IS_BODY = '<request>'
 
 
@@ -239,7 +241,8 @@
                  model=None, log_request=False, log_response=False,
                  num_retries=5, max_retry_wait=60, credentials_args=None,
                  default_global_params=None, additional_http_headers=None,
-                 check_response_func=None, retry_func=None):
+                 check_response_func=None, retry_func=None,
+                 response_encoding=None):
         _RequireClassAttrs(self, ('_package', '_scopes', 'messages_module'))
         if default_global_params is not None:
             util.Typecheck(default_global_params, self.params_type)
@@ -267,6 +270,7 @@
         self.additional_http_headers = additional_http_headers or {}
         self.check_response_func = check_response_func
         self.retry_func = retry_func
+        self.response_encoding = response_encoding
 
         # TODO(craigcitro): Finish deprecating these fields.
         _ = model
@@ -594,22 +598,26 @@
     def __ProcessHttpResponse(self, method_config, http_response, request):
         """Process the given http response."""
         if http_response.status_code not in (http_client.OK,
+                                             http_client.CREATED,
                                              http_client.NO_CONTENT):
-            raise exceptions.HttpError(
-                http_response.info, http_response.content,
-                http_response.request_url, method_config, request)
+            raise exceptions.HttpError.FromResponse(
+                http_response, method_config=method_config, request=request)
         if http_response.status_code == http_client.NO_CONTENT:
             # TODO(craigcitro): Find out why _replace doesn't seem to work
             # here.
             http_response = http_wrapper.Response(
                 info=http_response.info, content='{}',
                 request_url=http_response.request_url)
+
+        content = http_response.content
+        if self._client.response_encoding and isinstance(content, bytes):
+            content = content.decode(self._client.response_encoding)
+
         if self.__client.response_type_model == 'json':
-            return http_response.content
+            return content
         response_type = _LoadClass(method_config.response_type_name,
                                    self.__client.MESSAGES_MODULE)
-        return self.__client.DeserializeMessage(
-            response_type, http_response.content)
+        return self.__client.DeserializeMessage(response_type, content)
 
     def __SetBaseHeaders(self, http_request, client):
         """Fill in the basic headers on http_request."""
diff --git a/apitools/base/py/base_api_test.py b/apitools/base/py/base_api_test.py
index 7b23fa6..b00085c 100644
--- a/apitools/base/py/base_api_test.py
+++ b/apitools/base/py/base_api_test.py
@@ -132,6 +132,25 @@
                 http_response.content,
                 service.ProcessHttpResponse(method_config, http_response))
 
+    def testJsonResponseEncoding(self):
+        # On Python 3, httplib2 always returns bytes, so we need to check that
+        # we can correctly decode the message content using the given encoding.
+        method_config = base_api.ApiMethodInfo(
+            response_type_name='SimpleMessage')
+        service = FakeService(FakeClient(
+            'http://www.example.com/', credentials=FakeCredentials(),
+            response_encoding='utf8'))
+        http_response = http_wrapper.Response(
+            info={'status': '200'}, content=b'{"field": "abc"}',
+            request_url='http://www.google.com')
+        response_message = SimpleMessage(field=u'abc')
+        self.assertEqual(response_message, service.ProcessHttpResponse(
+            method_config, http_response))
+        with service.client.JsonResponseModel():
+            self.assertEqual(
+                http_response.content.decode('utf8'),
+                service.ProcessHttpResponse(method_config, http_response))
+
     def testAdditionalHeaders(self):
         additional_headers = {'Request-Is-Awesome': '1'}
         client = self.__GetFakeClient()
@@ -198,10 +217,9 @@
         service = FakeService(client=client)
         request = SimpleMessage()
         with mock(base_api.http_wrapper, 'MakeRequest', fakeMakeRequest):
-            with self.assertRaises(exceptions.HttpError) as error_context:
+            with self.assertRaises(exceptions.HttpBadRequestError) as err:
                 service._RunMethod(method_config, request)
-        http_error = error_context.exception
-        self.assertEquals(400, http_error.status_code)
+        http_error = err.exception
         self.assertEquals('http://www.google.com', http_error.url)
         self.assertEquals('{"field": "abc"}', http_error.content)
         self.assertEquals(method_config, http_error.method_config)
diff --git a/apitools/base/py/base_cli.py b/apitools/base/py/base_cli.py
deleted file mode 100644
index 2527e64..0000000
--- a/apitools/base/py/base_cli.py
+++ /dev/null
@@ -1,161 +0,0 @@
-#!/usr/bin/env python
-#
-# Copyright 2015 Google Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""Base script for generated CLI."""
-
-from __future__ import absolute_import
-
-import atexit
-import code
-import logging
-import os
-import readline
-import rlcompleter
-import sys
-
-import gflags as flags
-from google.apputils import appcommands
-
-from apitools.base.py import encoding
-from apitools.base.py import exceptions
-
-__all__ = [
-    'ConsoleWithReadline',
-    'DeclareBaseFlags',
-    'FormatOutput',
-    'SetupLogger',
-    'run_main',
-]
-
-
-# TODO(craigcitro): We should move all the flags for the
-# StandardQueryParameters into this file, so that they can be used
-# elsewhere easily.
-
-_BASE_FLAGS_DECLARED = False
-_OUTPUT_FORMATTER_MAP = {
-    'protorpc': lambda x: x,
-    'json': encoding.MessageToJson,
-}
-
-
-def DeclareBaseFlags():
-    """Declare base flags for all CLIs."""
-    # TODO(craigcitro): FlagValidators?
-    global _BASE_FLAGS_DECLARED  # pylint: disable=global-statement
-    if _BASE_FLAGS_DECLARED:
-        return
-    flags.DEFINE_boolean(
-        'log_request', False,
-        'Log requests.')
-    flags.DEFINE_boolean(
-        'log_response', False,
-        'Log responses.')
-    flags.DEFINE_boolean(
-        'log_request_response', False,
-        'Log requests and responses.')
-    flags.DEFINE_enum(
-        'output_format',
-        'protorpc',
-        _OUTPUT_FORMATTER_MAP.keys(),
-        'Display format for results.')
-
-    _BASE_FLAGS_DECLARED = True
-
-FLAGS = flags.FLAGS
-
-
-def SetupLogger():
-    if FLAGS.log_request or FLAGS.log_response or FLAGS.log_request_response:
-        logging.basicConfig()
-        logging.getLogger().setLevel(logging.INFO)
-
-
-def FormatOutput(message, output_format=None):
-    """Convert the output to the user-specified format."""
-    output_format = output_format or FLAGS.output_format
-    formatter = _OUTPUT_FORMATTER_MAP.get(FLAGS.output_format)
-    if formatter is None:
-        raise exceptions.UserError('Unknown output format: %s' % output_format)
-    return formatter(message)
-
-
-class _SmartCompleter(rlcompleter.Completer):
-
-    def _callable_postfix(self, val, word):
-        if ('(' in readline.get_line_buffer() or
-                not callable(val)):
-            return word
-        return word + '('
-
-    def complete(self, text, state):
-        if not readline.get_line_buffer().strip():
-            if not state:
-                return '  '
-            return None
-        return rlcompleter.Completer.complete(self, text, state)
-
-
-class ConsoleWithReadline(code.InteractiveConsole):
-
-    """InteractiveConsole with readline, tab completion, and history."""
-
-    def __init__(self, env, filename='<console>', histfile=None):
-        new_locals = dict(env)
-        new_locals.update({
-            '_SmartCompleter': _SmartCompleter,
-            'readline': readline,
-            'rlcompleter': rlcompleter,
-        })
-        code.InteractiveConsole.__init__(self, new_locals, filename)
-        readline.parse_and_bind('tab: complete')
-        readline.set_completer(_SmartCompleter(new_locals).complete)
-        if histfile is not None:
-            histfile = os.path.expanduser(histfile)
-            if os.path.exists(histfile):
-                readline.read_history_file(histfile)
-            atexit.register(lambda: readline.write_history_file(histfile))
-
-
-def run_main():  # pylint: disable=invalid-name
-    """Function to be used as setuptools script entry point.
-
-    Appcommands assumes that it always runs as __main__, but launching
-    via a setuptools-generated entry_point breaks this rule. We do some
-    trickery here to make sure that appcommands and flags find their
-    state where they expect to by faking ourselves as __main__.
-    """
-
-    # Put the flags for this module somewhere the flags module will look
-    # for them.
-    # pylint: disable=protected-access
-    new_name = flags._GetMainModule()
-    sys.modules[new_name] = sys.modules['__main__']
-    for flag in FLAGS.FlagsByModuleDict().get(__name__, []):
-        FLAGS._RegisterFlagByModule(new_name, flag)
-        for key_flag in FLAGS.KeyFlagsByModuleDict().get(__name__, []):
-            FLAGS._RegisterKeyFlagForModule(new_name, key_flag)
-    # pylint: enable=protected-access
-
-    # Now set __main__ appropriately so that appcommands will be
-    # happy.
-    sys.modules['__main__'] = sys.modules[__name__]
-    appcommands.Run()
-    sys.modules['__main__'] = sys.modules.pop(new_name)
-
-
-if __name__ == '__main__':
-    appcommands.Run()
diff --git a/apitools/base/py/batch.py b/apitools/base/py/batch.py
index f925ccf..cdb9ce6 100644
--- a/apitools/base/py/batch.py
+++ b/apitools/base/py/batch.py
@@ -123,7 +123,7 @@
             return response_code not in self.__retryable_codes
 
         def HandleResponse(self, http_response, exception):
-            """Handles an incoming http response to the request in http_request.
+            """Handles incoming http response to the request in http_request.
 
             This is intended to be used as a callback function for
             BatchHttpRequest.Add.
@@ -140,16 +140,19 @@
                 self.__response = self.__service.ProcessHttpResponse(
                     self.__method_config, self.__http_response)
 
-    def __init__(self, batch_url=None, retryable_codes=None):
+    def __init__(self, batch_url=None, retryable_codes=None,
+                 response_encoding=None):
         """Initialize a batch API request object.
 
         Args:
           batch_url: Base URL for batch API calls.
           retryable_codes: A list of integer HTTP codes that can be retried.
+          response_encoding: The encoding type of response content.
         """
         self.api_requests = []
         self.retryable_codes = retryable_codes or []
         self.batch_url = batch_url or 'https://www.googleapis.com/batch'
+        self.response_encoding = response_encoding
 
     def Add(self, service, method, request, global_params=None):
         """Add a request to the batch.
@@ -213,7 +216,8 @@
                 # incomplete requests.
                 batch_http_request = BatchHttpRequest(
                     batch_url=self.batch_url,
-                    callback=batch_request_callback
+                    callback=batch_request_callback,
+                    response_encoding=self.response_encoding
                 )
                 for request in itertools.islice(requests,
                                                 i, i + batch_size):
@@ -240,7 +244,7 @@
 
     """Batches multiple http_wrapper.Request objects into a single request."""
 
-    def __init__(self, batch_url, callback=None):
+    def __init__(self, batch_url, callback=None, response_encoding=None):
         """Constructor for a BatchHttpRequest.
 
         Args:
@@ -251,6 +255,7 @@
               apiclient.errors.HttpError exception object if an HTTP error
               occurred while processing the request, or None if no error
               occurred.
+          response_encoding: The encoding type of response content.
         """
         # Endpoint to which these requests are sent.
         self.__batch_url = batch_url
@@ -259,6 +264,9 @@
         # batch.
         self.__callback = callback
 
+        # Response content will be decoded if this is provided.
+        self.__response_encoding = response_encoding
+
         # List of requests, responses and handlers.
         self.__request_response_handlers = {}
 
@@ -320,10 +328,12 @@
         # Construct status line
         parsed = urllib_parse.urlsplit(request.url)
         request_line = urllib_parse.urlunsplit(
-            (None, None, parsed.path, parsed.query, None))
+            ('', '', parsed.path, parsed.query, ''))
+        if not isinstance(request_line, six.text_type):
+            request_line = request_line.decode('utf-8')
         status_line = u' '.join((
             request.http_method,
-            request_line.decode('utf-8'),
+            request_line,
             u'HTTP/1.1\n'
         ))
         major, minor = request.headers.get(
@@ -444,8 +454,12 @@
         # Prepend with a content-type header so Parser can handle it.
         header = 'content-type: %s\r\n\r\n' % response.info['content-type']
 
+        content = response.content
+        if isinstance(content, bytes) and self.__response_encoding:
+            content = response.content.decode(self.__response_encoding)
+
         parser = email_parser.Parser()
-        mime_response = parser.parsestr(header + response.content)
+        mime_response = parser.parsestr(header + content)
 
         if not mime_response.is_multipart():
             raise exceptions.BatchError(
diff --git a/apitools/base/py/batch_test.py b/apitools/base/py/batch_test.py
index 9bf9dd0..0574dc6 100644
--- a/apitools/base/py/batch_test.py
+++ b/apitools/base/py/batch_test.py
@@ -427,6 +427,24 @@
         self.assertEqual(expected_serialized_request,
                          batch_request._SerializeRequest(request))
 
+    def testSerializeRequestWithPathAndQueryParams(self):
+        request = http_wrapper.Request(
+            url='my/path?query=param',
+            body='Hello World',
+            headers={'content-type': 'protocol/version'})
+        expected_serialized_request = '\n'.join([
+            'GET my/path?query=param HTTP/1.1',
+            'Content-Type: protocol/version',
+            'MIME-Version: 1.0',
+            'content-length: 11',
+            'Host: ',
+            '',
+            'Hello World',
+        ])
+        batch_request = batch.BatchHttpRequest('https://www.example.com')
+        self.assertEqual(expected_serialized_request,
+                         batch_request._SerializeRequest(request))
+
     def testDeserializeRequest(self):
         serialized_payload = '\n'.join([
             'GET  HTTP/1.1',
@@ -564,6 +582,49 @@
             self.assertIn(
                 'Second response', test_responses['2'].response.content)
 
+    def testInternalExecuteWithEncodedResponse(self):
+        with mock.patch.object(http_wrapper, 'MakeRequest',
+                               autospec=True) as mock_request:
+            self.__ConfigureMock(
+                mock_request,
+                http_wrapper.Request('https://www.example.com', 'POST', {
+                    'content-type': 'multipart/mixed; boundary="None"',
+                    'content-length': 274,
+                }, 'x' * 274),
+                http_wrapper.Response({
+                    'status': '200',
+                    'content-type': 'multipart/mixed; boundary="boundary"',
+                }, textwrap.dedent("""\
+                --boundary
+                content-type: text/plain
+                content-id: <id+1>
+
+                HTTP/1.1 200 OK
+                response
+
+                --boundary--""").encode('utf-8'), None))
+
+            test_request = {
+                '1': batch.RequestResponseAndHandler(
+                    http_wrapper.Request(body='first'), None, None),
+            }
+
+            batch_request = batch.BatchHttpRequest('https://www.example.com',
+                                                   response_encoding='utf-8')
+            batch_request._BatchHttpRequest__request_response_handlers = (
+                test_request)
+
+            batch_request._Execute(FakeHttp())
+
+            test_responses = (
+                batch_request._BatchHttpRequest__request_response_handlers)
+
+            self.assertEqual(http_client.OK,
+                             test_responses['1'].response.status_code)
+
+            self.assertIn(
+                'response', test_responses['1'].response.content)
+
     def testPublicExecute(self):
 
         def LocalCallback(response, exception):
diff --git a/apitools/base/py/cli.py b/apitools/base/py/cli.py
deleted file mode 100644
index 920cfc5..0000000
--- a/apitools/base/py/cli.py
+++ /dev/null
@@ -1,35 +0,0 @@
-#!/usr/bin/env python
-#
-# Copyright 2015 Google Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""Top-level import for all CLI-related functionality in apitools.
-
-Note that importing this file will ultimately have side-effects, and
-may require imports not available in all environments (such as App
-Engine). In particular, picking up some readline-related imports can
-cause pain.
-"""
-
-# pylint:disable=wildcard-import
-# pylint:disable=unused-wildcard-import
-
-from apitools.base.py.app2 import *
-from apitools.base.py.base_cli import *
-
-try:
-    # pylint:disable=no-name-in-module
-    from apitools.base.py.internal.cli import *
-except ImportError:
-    pass
diff --git a/apitools/base/py/compression.py b/apitools/base/py/compression.py
new file mode 100644
index 0000000..ca111be
--- /dev/null
+++ b/apitools/base/py/compression.py
@@ -0,0 +1,147 @@
+#!/usr/bin/env python
+#
+# Copyright 2017 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Compression support for apitools."""
+
+from collections import deque
+
+from apitools.base.py import gzip
+
+__all__ = [
+    'CompressStream',
+]
+
+
+# pylint: disable=invalid-name
+# Note: Apitools only uses the default chunksize when compressing.
+def CompressStream(in_stream, length=None, compresslevel=2,
+                   chunksize=16777216):
+
+    """Compresses an input stream into a file-like buffer.
+
+    This reads from the input stream until either we've stored at least length
+    compressed bytes, or the input stream has been exhausted.
+
+    This supports streams of unknown size.
+
+    Args:
+        in_stream: The input stream to read from.
+        length: The target number of compressed bytes to buffer in the output
+            stream. If length is none, the input stream will be compressed
+            until it's exhausted.
+
+            The actual length of the output buffer can vary from the target.
+            If the input stream is exhausted, the output buffer may be smaller
+            than expected. If the data is incompressible, the amount by which
+            the maximum length can be exceeded can be calculated to be:
+
+              chunksize + 5 * (floor((chunksize - 1) / 16383) + 1) + 17
+
+            This accounts for additional header data gzip adds. For the default
+            16MiB chunksize, this results in the max size of the output buffer
+            being:
+
+              length + 16MiB + 5142 bytes
+
+        compresslevel: Optional, defaults to 2. The desired compression level.
+        chunksize: Optional, defaults to 16MiB. The chunk size used when
+            reading data from the input stream to write into the output
+            buffer.
+
+    Returns:
+        A file-like output buffer of compressed bytes, the number of bytes read
+        from the input stream, and a flag denoting if the input stream was
+        exhausted.
+    """
+    in_read = 0
+    in_exhausted = False
+    out_stream = StreamingBuffer()
+    with gzip.GzipFile(mode='wb',
+                       fileobj=out_stream,
+                       compresslevel=compresslevel) as compress_stream:
+        # Read until we've written at least length bytes to the output stream.
+        while not length or out_stream.length < length:
+            data = in_stream.read(chunksize)
+            data_length = len(data)
+            compress_stream.write(data)
+            in_read += data_length
+            # If we read less than requested, the stream is exhausted.
+            if data_length < chunksize:
+                in_exhausted = True
+                break
+    return out_stream, in_read, in_exhausted
+
+
+class StreamingBuffer(object):
+
+    """Provides a file-like object that writes to a temporary buffer.
+
+    When data is read from the buffer, it is permanently removed. This is
+    useful when there are memory constraints preventing the entire buffer from
+    being stored in memory.
+    """
+
+    def __init__(self):
+        # The buffer of byte arrays.
+        self.__buf = deque()
+        # The number of bytes in __buf.
+        self.__size = 0
+
+    def __len__(self):
+        return self.__size
+
+    def __nonzero__(self):
+        # For 32-bit python2.x, len() cannot exceed a 32-bit number; avoid
+        # accidental len() calls from httplib in the form of "if this_object:".
+        return bool(self.__size)
+
+    @property
+    def length(self):
+        # For 32-bit python2.x, len() cannot exceed a 32-bit number.
+        return self.__size
+
+    def write(self, data):
+        # Gzip can write many 0 byte chunks for highly compressible data.
+        # Prevent them from being added internally.
+        if data is not None and data:
+            self.__buf.append(data)
+            self.__size += len(data)
+
+    def read(self, size=None):
+        """Read at most size bytes from this buffer.
+
+        Bytes read from this buffer are consumed and are permanently removed.
+
+        Args:
+          size: If provided, read no more than size bytes from the buffer.
+            Otherwise, this reads the entire buffer.
+
+        Returns:
+          The bytes read from this buffer.
+        """
+        if size is None:
+            size = self.__size
+        ret_list = []
+        while size > 0 and self.__buf:
+            data = self.__buf.popleft()
+            size -= len(data)
+            ret_list.append(data)
+        if size < 0:
+            ret_list[-1], remainder = ret_list[-1][:size], ret_list[-1][size:]
+            self.__buf.appendleft(remainder)
+        ret = b''.join(ret_list)
+        self.__size -= len(ret)
+        return ret
diff --git a/apitools/base/py/compression_test.py b/apitools/base/py/compression_test.py
new file mode 100644
index 0000000..c8ecdac
--- /dev/null
+++ b/apitools/base/py/compression_test.py
@@ -0,0 +1,149 @@
+#!/usr/bin/env python
+#
+# Copyright 2017 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Tests for compression."""
+
+from apitools.base.py import compression
+from apitools.base.py import gzip
+
+import six
+import unittest2
+
+
+class CompressionTest(unittest2.TestCase):
+
+    def setUp(self):
+        # Sample highly compressible data (~50MB).
+        self.sample_data = b'abc' * 16777216
+        # Stream of the sample data.
+        self.stream = six.BytesIO()
+        self.stream.write(self.sample_data)
+        self.length = self.stream.tell()
+        self.stream.seek(0)
+
+    def testCompressionExhausted(self):
+        """Test full compression.
+
+        Test that highly compressible data is actually compressed in entirety.
+        """
+        output, read, exhausted = compression.CompressStream(
+            self.stream,
+            self.length,
+            9)
+        # Ensure the compressed buffer is smaller than the input buffer.
+        self.assertLess(output.length, self.length)
+        # Ensure we read the entire input stream.
+        self.assertEqual(read, self.length)
+        # Ensure the input stream was exhausted.
+        self.assertTrue(exhausted)
+
+    def testCompressionUnbounded(self):
+        """Test unbounded compression.
+
+        Test that the input stream is exhausted when length is None.
+        """
+        output, read, exhausted = compression.CompressStream(
+            self.stream,
+            None,
+            9)
+        # Ensure the compressed buffer is smaller than the input buffer.
+        self.assertLess(output.length, self.length)
+        # Ensure we read the entire input stream.
+        self.assertEqual(read, self.length)
+        # Ensure the input stream was exhausted.
+        self.assertTrue(exhausted)
+
+    def testCompressionPartial(self):
+        """Test partial compression.
+
+        Test that the length parameter works correctly. The amount of data
+        that's compressed can be greater than or equal to the requested length.
+        """
+        output_length = 40
+        output, _, exhausted = compression.CompressStream(
+            self.stream,
+            output_length,
+            9)
+        # Ensure the requested read size is <= the compressed buffer size.
+        self.assertLessEqual(output_length, output.length)
+        # Ensure the input stream was not exhausted.
+        self.assertFalse(exhausted)
+
+    def testCompressionIntegrity(self):
+        """Test that compressed data can be decompressed."""
+        output, read, exhausted = compression.CompressStream(
+            self.stream,
+            self.length,
+            9)
+        # Ensure uncompressed data matches the sample data.
+        with gzip.GzipFile(fileobj=output) as f:
+            original = f.read()
+            self.assertEqual(original, self.sample_data)
+        # Ensure we read the entire input stream.
+        self.assertEqual(read, self.length)
+        # Ensure the input stream was exhausted.
+        self.assertTrue(exhausted)
+
+
+class StreamingBufferTest(unittest2.TestCase):
+
+    def setUp(self):
+        self.stream = compression.StreamingBuffer()
+
+    def testSimpleStream(self):
+        """Test simple stream operations.
+
+        Test that the stream can be written to and read from. Also test that
+        reading from the stream consumes the bytes.
+        """
+        # Ensure the stream is empty.
+        self.assertEqual(self.stream.length, 0)
+        # Ensure data is correctly written.
+        self.stream.write(b'Sample data')
+        self.assertEqual(self.stream.length, 11)
+        # Ensure data can be read and the read data is purged from the stream.
+        data = self.stream.read(11)
+        self.assertEqual(data, b'Sample data')
+        self.assertEqual(self.stream.length, 0)
+
+    def testPartialReads(self):
+        """Test partial stream reads.
+
+        Test that the stream can be read in chunks while preserving the
+        consumption mechanics.
+        """
+        self.stream.write(b'Sample data')
+        # Ensure data can be read and the read data is purged from the stream.
+        data = self.stream.read(6)
+        self.assertEqual(data, b'Sample')
+        self.assertEqual(self.stream.length, 5)
+        # Ensure the remaining data can be read.
+        data = self.stream.read(5)
+        self.assertEqual(data, b' data')
+        self.assertEqual(self.stream.length, 0)
+
+    def testTooShort(self):
+        """Test excessive stream reads.
+
+        Test that more data can be requested from the stream than available
+        without raising an exception.
+        """
+        self.stream.write(b'Sample')
+        # Ensure requesting more data than available does not raise an
+        # exception.
+        data = self.stream.read(100)
+        self.assertEqual(data, b'Sample')
+        self.assertEqual(self.stream.length, 0)
diff --git a/apitools/base/py/credentials_lib.py b/apitools/base/py/credentials_lib.py
index 913c144..bf39285 100644
--- a/apitools/base/py/credentials_lib.py
+++ b/apitools/base/py/credentials_lib.py
@@ -17,6 +17,7 @@
 """Common credentials classes and constructors."""
 from __future__ import print_function
 
+import contextlib
 import datetime
 import json
 import os
@@ -28,12 +29,27 @@
 import oauth2client.client
 from oauth2client import service_account
 from oauth2client import tools  # for gflags declarations
+import six
 from six.moves import http_client
 from six.moves import urllib
 
 from apitools.base.py import exceptions
 from apitools.base.py import util
 
+# App Engine does not support ctypes which are required for the
+# monotonic time used in fasteners. Conversely, App Engine does
+# not support colocated concurrent processes, so process locks
+# are not needed.
+try:
+    import fasteners
+    _FASTENERS_AVAILABLE = True
+except ImportError as import_error:
+    server_env = os.environ.get('SERVER_SOFTWARE', '')
+    if not (server_env.startswith('Development') or
+            server_env.startswith('Google App Engine')):
+        raise import_error
+    _FASTENERS_AVAILABLE = False
+
 # Note: we try the oauth2client imports two ways, to accomodate layout
 # changes in oauth2client 2.0+. We can remove these once we no longer
 # support oauth2client < 2.0.
@@ -45,14 +61,14 @@
     from oauth2client import gce
 
 try:
-    from oauth2client.contrib import locked_file
+    from oauth2client.contrib import multiprocess_file_storage
+    _NEW_FILESTORE = True
 except ImportError:
-    from oauth2client import locked_file
-
-try:
-    from oauth2client.contrib import multistore_file
-except ImportError:
-    from oauth2client import multistore_file
+    _NEW_FILESTORE = False
+    try:
+        from oauth2client.contrib import multistore_file
+    except ImportError:
+        from oauth2client import multistore_file
 
 try:
     import gflags
@@ -187,25 +203,12 @@
         return credentials
     else:
         # oauth2client < 2.0.0
-        with open(private_key_filename) as key_file:
+        with open(private_key_filename, 'rb') as key_file:
             return oauth2client.client.SignedJwtAssertionCredentials(
                 service_account_name, key_file.read(), scopes,
                 user_agent=user_agent)
 
 
-def _EnsureFileExists(filename):
-    """Touches a file; returns False on error, True on success."""
-    if not os.path.exists(filename):
-        old_umask = os.umask(0o177)
-        try:
-            open(filename, 'a+b').close()
-        except OSError:
-            return False
-        finally:
-            os.umask(old_umask)
-    return True
-
-
 def _GceMetadataRequest(relative_url, use_metadata_ip=False):
     """Request the given url from the GCE metadata service."""
     if use_metadata_ip:
@@ -247,7 +250,9 @@
         # identified these scopes in the same execution. However, the
         # available scopes don't change once an instance is created,
         # so there is no reason to perform more than one query.
-        self.__service_account_name = service_account_name
+        self.__service_account_name = six.ensure_text(
+            service_account_name,
+            encoding='utf-8',)
         cached_scopes = None
         cache_filename = kwds.get('cache_filename')
         if cache_filename:
@@ -265,7 +270,7 @@
         # catch and squelch the warning.
         with warnings.catch_warnings():
             warnings.simplefilter('ignore')
-            super(GceAssertionCredentials, self).__init__(scopes, **kwds)
+            super(GceAssertionCredentials, self).__init__(scope=scopes, **kwds)
 
     @classmethod
     def Get(cls, *args, **kwds):
@@ -288,29 +293,20 @@
             'scopes': sorted(list(scopes)) if scopes else None,
             'svc_acct_name': self.__service_account_name,
         }
-        with cache_file_lock:
-            if _EnsureFileExists(cache_filename):
-                cache_file = locked_file.LockedFile(
-                    cache_filename, 'r+b', 'rb')
-                try:
-                    cache_file.open_and_lock()
-                    cached_creds_str = cache_file.file_handle().read()
-                    if cached_creds_str:
-                        # Cached credentials metadata dict.
-                        cached_creds = json.loads(cached_creds_str)
-                        if (creds['svc_acct_name'] ==
-                                cached_creds['svc_acct_name']):
-                            if (creds['scopes'] in
-                                    (None, cached_creds['scopes'])):
-                                scopes = cached_creds['scopes']
-                except KeyboardInterrupt:
-                    raise
-                except:  # pylint: disable=bare-except
-                    # Treat exceptions as a cache miss.
-                    pass
-                finally:
-                    cache_file.unlock_and_close()
-        return scopes
+        cache_file = _MultiProcessCacheFile(cache_filename)
+        try:
+            cached_creds_str = cache_file.LockedRead()
+            if not cached_creds_str:
+                return None
+            cached_creds = json.loads(cached_creds_str)
+            if creds['svc_acct_name'] == cached_creds['svc_acct_name']:
+                if creds['scopes'] in (None, cached_creds['scopes']):
+                    return cached_creds['scopes']
+        except KeyboardInterrupt:
+            raise
+        except:  # pylint: disable=bare-except
+            # Treat exceptions as a cache miss.
+            pass
 
     def _WriteCacheFile(self, cache_filename, scopes):
         """Writes the credential metadata to the cache file.
@@ -322,28 +318,19 @@
           cache_filename: Cache filename to check.
           scopes: Scopes for the desired credentials.
         """
-        with cache_file_lock:
-            if _EnsureFileExists(cache_filename):
-                cache_file = locked_file.LockedFile(
-                    cache_filename, 'r+b', 'rb')
-                try:
-                    cache_file.open_and_lock()
-                    if cache_file.is_locked():
-                        creds = {  # Credentials metadata dict.
-                            'scopes': sorted(list(scopes)),
-                            'svc_acct_name': self.__service_account_name}
-                        cache_file.file_handle().write(
-                            json.dumps(creds, encoding='ascii'))
-                        # If it's not locked, the locking process will
-                        # write the same data to the file, so just
-                        # continue.
-                except KeyboardInterrupt:
-                    raise
-                except:  # pylint: disable=bare-except
-                    # Treat exceptions as a cache miss.
-                    pass
-                finally:
-                    cache_file.unlock_and_close()
+        # Credentials metadata dict.
+        scopes = sorted([six.ensure_text(scope) for scope in scopes])
+        creds = {'scopes': scopes,
+                 'svc_acct_name': self.__service_account_name}
+        creds_str = json.dumps(creds)
+        cache_file = _MultiProcessCacheFile(cache_filename)
+        try:
+            cache_file.LockedWrite(creds_str)
+        except KeyboardInterrupt:
+            raise
+        except:  # pylint: disable=bare-except
+            # Treat exceptions as a cache miss.
+            pass
 
     def _ScopesFromMetadataServer(self, scopes):
         """Returns instance scopes based on GCE metadata server."""
@@ -368,7 +355,7 @@
     def GetServiceAccount(self, account):
         relative_url = 'instance/service-accounts'
         response = _GceMetadataRequest(relative_url)
-        response_lines = [line.rstrip('/\n\r')
+        response_lines = [six.ensure_str(line).rstrip(u'/\n\r')
                           for line in response.readlines()]
         return account in response_lines
 
@@ -376,7 +363,7 @@
         relative_url = 'instance/service-accounts/{0}/scopes'.format(
             self.__service_account_name)
         response = _GceMetadataRequest(relative_url)
-        return util.NormalizeScopes(scope.strip()
+        return util.NormalizeScopes(six.ensure_str(scope).strip()
                                     for scope in response.readlines())
 
     # pylint: disable=arguments-differ
@@ -409,7 +396,7 @@
             if self.store:
                 self.store.locked_put(self)
             raise
-        content = response.read()
+        content = six.ensure_str(response.read())
         try:
             credential_info = json.loads(content)
         except ValueError:
@@ -428,14 +415,26 @@
         if self.store:
             self.store.locked_put(self)
 
+    def to_json(self):
+        # OAuth2Client made gce.AppAssertionCredentials unserializable as of
+        # v3.0, but we need those credentials to be serializable for use with
+        # this library, so we use AppAssertionCredentials' parent's to_json
+        # method.
+        # pylint: disable=bad-super-call
+        return super(gce.AppAssertionCredentials, self).to_json()
+
     @classmethod
     def from_json(cls, json_data):
         data = json.loads(json_data)
         kwargs = {}
         if 'cache_filename' in data.get('kwargs', []):
             kwargs['cache_filename'] = data['kwargs']['cache_filename']
-        credentials = GceAssertionCredentials(scopes=[data['scope']],
-                                              **kwargs)
+        # Newer versions of GceAssertionCredentials don't have a "scope"
+        # attribute.
+        scope_list = None
+        if 'scope' in data:
+            scope_list = [data['scope']]
+        credentials = GceAssertionCredentials(scopes=scope_list, **kwargs)
         if 'access_token' in data:
             credentials.access_token = data['access_token']
         if 'token_expiry' in data:
@@ -537,11 +536,18 @@
 # TODO(craigcitro): Switch this from taking a path to taking a stream.
 def CredentialsFromFile(path, client_info, oauth2client_args=None):
     """Read credentials from a file."""
-    credential_store = multistore_file.get_credential_storage(
-        path,
-        client_info['client_id'],
-        client_info['user_agent'],
-        client_info['scope'])
+    user_agent = client_info['user_agent']
+    scope_key = client_info['scope']
+    if not isinstance(scope_key, six.string_types):
+        scope_key = ':'.join(scope_key)
+    storage_key = client_info['client_id'] + user_agent + scope_key
+
+    if _NEW_FILESTORE:
+        credential_store = multiprocess_file_storage.MultiprocessFileStorage(
+            path, storage_key)
+    else:
+        credential_store = multistore_file.get_credential_storage_custom_string_key(  # noqa
+            path, storage_key)
     if hasattr(FLAGS, 'auth_local_webserver'):
         FLAGS.auth_local_webserver = False
     credentials = credential_store.get()
@@ -568,6 +574,115 @@
     return credentials
 
 
+class _MultiProcessCacheFile(object):
+    """Simple multithreading and multiprocessing safe cache file.
+
+    Notes on behavior:
+    * the fasteners.InterProcessLock object cannot reliably prevent threads
+      from double-acquiring a lock. A threading lock is used in addition to
+      the InterProcessLock. The threading lock is always acquired first and
+      released last.
+    * The interprocess lock will not deadlock. If a process can not acquire
+      the interprocess lock within `_lock_timeout` the call will return as
+      a cache miss or unsuccessful cache write.
+    * App Engine environments cannot be process locked because (1) the runtime
+      does not provide monotonic time and (2) different processes may or may
+      not share the same machine. Because of this, process locks are disabled
+      and locking is only guaranteed to protect against multithreaded access.
+    """
+
+    _lock_timeout = 1
+    _encoding = 'utf-8'
+    _thread_lock = threading.Lock()
+
+    def __init__(self, filename):
+        self._file = None
+        self._filename = filename
+        if _FASTENERS_AVAILABLE:
+            self._process_lock_getter = self._ProcessLockAcquired
+            self._process_lock = fasteners.InterProcessLock(
+                '{0}.lock'.format(filename))
+        else:
+            self._process_lock_getter = self._DummyLockAcquired
+            self._process_lock = None
+
+    @contextlib.contextmanager
+    def _ProcessLockAcquired(self):
+        """Context manager for process locks with timeout."""
+        try:
+            is_locked = self._process_lock.acquire(timeout=self._lock_timeout)
+            yield is_locked
+        finally:
+            if is_locked:
+                self._process_lock.release()
+
+    @contextlib.contextmanager
+    def _DummyLockAcquired(self):
+        """Lock context manager for environments without process locks."""
+        yield True
+
+    def LockedRead(self):
+        """Acquire an interprocess lock and dump cache contents.
+
+        This method safely acquires the locks then reads a string
+        from the cache file. If the file does not exist and cannot
+        be created, it will return None. If the locks cannot be
+        acquired, this will also return None.
+
+        Returns:
+          cache data - string if present, None on failure.
+        """
+        file_contents = None
+        with self._thread_lock:
+            if not self._EnsureFileExists():
+                return None
+            with self._process_lock_getter() as acquired_plock:
+                if not acquired_plock:
+                    return None
+                with open(self._filename, 'rb') as f:
+                    file_contents = f.read().decode(encoding=self._encoding)
+        return file_contents
+
+    def LockedWrite(self, cache_data):
+        """Acquire an interprocess lock and write a string.
+
+        This method safely acquires the locks then writes a string
+        to the cache file. If the string is written successfully
+        the function will return True, if the write fails for any
+        reason it will return False.
+
+        Args:
+          cache_data: string or bytes to write.
+
+        Returns:
+          bool: success
+        """
+        if isinstance(cache_data, six.text_type):
+            cache_data = cache_data.encode(encoding=self._encoding)
+
+        with self._thread_lock:
+            if not self._EnsureFileExists():
+                return False
+            with self._process_lock_getter() as acquired_plock:
+                if not acquired_plock:
+                    return False
+                with open(self._filename, 'wb') as f:
+                    f.write(cache_data)
+                return True
+
+    def _EnsureFileExists(self):
+        """Touches a file; returns False on error, True on success."""
+        if not os.path.exists(self._filename):
+            old_umask = os.umask(0o177)
+            try:
+                open(self._filename, 'a+b').close()
+            except OSError:
+                return False
+            finally:
+                os.umask(old_umask)
+        return True
+
+
 # TODO(craigcitro): Push this into oauth2client.
 def GetUserinfo(credentials, http=None):  # pylint: disable=invalid-name
     """Get the userinfo associated with the given credentials.
@@ -596,7 +711,7 @@
 
 
 def _GetUserinfoUrl(credentials):
-    url_root = 'https://www.googleapis.com/oauth2/v2/tokeninfo'
+    url_root = 'https://oauth2.googleapis.com/tokeninfo'
     query_args = {'access_token': credentials.access_token}
     return '?'.join((url_root, urllib.parse.urlencode(query_args)))
 
diff --git a/apitools/base/py/credentials_lib_test.py b/apitools/base/py/credentials_lib_test.py
index 1bf5aa7..80b970c 100644
--- a/apitools/base/py/credentials_lib_test.py
+++ b/apitools/base/py/credentials_lib_test.py
@@ -13,6 +13,11 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+import json
+import os.path
+import shutil
+import tempfile
+
 import mock
 import six
 import unittest2
@@ -21,43 +26,103 @@
 from apitools.base.py import util
 
 
+class MetadataMock(object):
+
+    def __init__(self, scopes=None, service_account_name=None):
+        self._scopes = scopes or ['scope1']
+        self._sa = service_account_name or 'default'
+
+    def __call__(self, request_url):
+        if request_url.endswith('scopes'):
+            return six.StringIO(''.join(self._scopes))
+        elif request_url.endswith('service-accounts'):
+            return six.StringIO(self._sa)
+        elif request_url.endswith(
+                '/service-accounts/%s/token' % self._sa):
+            return six.StringIO('{"access_token": "token"}')
+        self.fail('Unexpected HTTP request to %s' % request_url)
+
+
 class CredentialsLibTest(unittest2.TestCase):
 
-    def _GetServiceCreds(self, service_account_name=None, scopes=None):
+    def _RunGceAssertionCredentials(
+            self, service_account_name=None, scopes=None, cache_filename=None):
         kwargs = {}
         if service_account_name is not None:
             kwargs['service_account_name'] = service_account_name
+        if cache_filename is not None:
+            kwargs['cache_filename'] = cache_filename
         service_account_name = service_account_name or 'default'
+        credentials = credentials_lib.GceAssertionCredentials(
+            scopes, **kwargs)
+        self.assertIsNone(credentials._refresh(None))
+        return credentials
 
-        def MockMetadataCalls(request_url):
-            default_scopes = scopes or ['scope1']
-            if request_url.endswith('scopes'):
-                return six.StringIO(''.join(default_scopes))
-            elif request_url.endswith('service-accounts'):
-                return six.StringIO(service_account_name)
-            elif request_url.endswith(
-                    '/service-accounts/%s/token' % service_account_name):
-                return six.StringIO('{"access_token": "token"}')
-            self.fail('Unexpected HTTP request to %s' % request_url)
-
-        with mock.patch.object(credentials_lib, '_GceMetadataRequest',
-                               side_effect=MockMetadataCalls,
-                               autospec=True) as opener_mock:
-            with mock.patch.object(util, 'DetectGce',
-                                   autospec=True) as mock_detect:
-                mock_detect.return_value = True
-                credentials = credentials_lib.GceAssertionCredentials(
-                    scopes, **kwargs)
-                self.assertIsNone(credentials._refresh(None))
+    def _GetServiceCreds(self, service_account_name=None, scopes=None):
+        metadatamock = MetadataMock(scopes, service_account_name)
+        with mock.patch.object(util, 'DetectGce', autospec=True) as gce_detect:
+            gce_detect.return_value = True
+            with mock.patch.object(credentials_lib,
+                                   '_GceMetadataRequest',
+                                   side_effect=metadatamock,
+                                   autospec=True) as opener_mock:
+                credentials = self._RunGceAssertionCredentials(
+                    service_account_name=service_account_name,
+                    scopes=scopes)
             self.assertEqual(3, opener_mock.call_count)
         return credentials
 
     def testGceServiceAccounts(self):
         scopes = ['scope1']
-        self._GetServiceCreds()
-        self._GetServiceCreds(scopes=scopes)
-        self._GetServiceCreds(service_account_name='my_service_account',
+        self._GetServiceCreds(service_account_name=None,
+                              scopes=None)
+        self._GetServiceCreds(service_account_name=None,
                               scopes=scopes)
+        self._GetServiceCreds(
+            service_account_name='my_service_account',
+            scopes=scopes)
+
+    def testGceAssertionCredentialsToJson(self):
+        scopes = ['scope1']
+        service_account_name = 'my_service_account'
+        # Ensure that we can obtain a JSON representation of
+        # GceAssertionCredentials to put in a credential Storage object, and
+        # that the JSON representation is valid.
+        original_creds = self._GetServiceCreds(
+            service_account_name=service_account_name,
+            scopes=scopes)
+        original_creds_json_str = original_creds.to_json()
+        json.loads(original_creds_json_str)
+
+    @mock.patch.object(util, 'DetectGce', autospec=True)
+    def testGceServiceAccountsCached(self, mock_detect):
+        mock_detect.return_value = True
+        tempd = tempfile.mkdtemp()
+        tempname = os.path.join(tempd, 'creds')
+        scopes = ['scope1']
+        service_account_name = 'some_service_account_name'
+        metadatamock = MetadataMock(scopes, service_account_name)
+        with mock.patch.object(credentials_lib,
+                               '_GceMetadataRequest',
+                               side_effect=metadatamock,
+                               autospec=True) as opener_mock:
+            try:
+                creds1 = self._RunGceAssertionCredentials(
+                    service_account_name=service_account_name,
+                    cache_filename=tempname,
+                    scopes=scopes)
+                pre_cache_call_count = opener_mock.call_count
+                creds2 = self._RunGceAssertionCredentials(
+                    service_account_name=service_account_name,
+                    cache_filename=tempname,
+                    scopes=None)
+            finally:
+                shutil.rmtree(tempd)
+        self.assertEqual(creds1.client_id, creds2.client_id)
+        self.assertEqual(pre_cache_call_count, 3)
+        # Caching obviates the need for extra metadata server requests.
+        # Only one metadata request is made if the cache is hit.
+        self.assertEqual(opener_mock.call_count, 4)
 
     def testGetServiceAccount(self):
         # We'd also like to test the metadata calls, which requires
diff --git a/apitools/base/py/encoding.py b/apitools/base/py/encoding.py
index 598f6e6..25b02d3 100644
--- a/apitools/base/py/encoding.py
+++ b/apitools/base/py/encoding.py
@@ -16,20 +16,12 @@
 
 """Common code for converting proto to other formats, such as JSON."""
 
-import base64
-import collections
-import datetime
-import json
-import os
-import sys
+# pylint:disable=wildcard-import
+from apitools.base.py.encoding_helper import *
+import apitools.base.py.extra_types  # pylint:disable=unused-import
 
-import six
 
-from apitools.base.protorpclite import message_types
-from apitools.base.protorpclite import messages
-from apitools.base.protorpclite import protojson
-from apitools.base.py import exceptions
-
+# pylint:disable=undefined-all-variable
 __all__ = [
     'CopyProtoMessage',
     'JsonToMessage',
@@ -44,688 +36,3 @@
     'GetCustomJsonEnumMapping',
     'AddCustomJsonEnumMapping',
 ]
-
-
-_Codec = collections.namedtuple('_Codec', ['encoder', 'decoder'])
-CodecResult = collections.namedtuple('CodecResult', ['value', 'complete'])
-
-
-# TODO(craigcitro): Make these non-global.
-_UNRECOGNIZED_FIELD_MAPPINGS = {}
-_CUSTOM_MESSAGE_CODECS = {}
-_CUSTOM_FIELD_CODECS = {}
-_FIELD_TYPE_CODECS = {}
-
-
-def MapUnrecognizedFields(field_name):
-    """Register field_name as a container for unrecognized fields."""
-    def Register(cls):
-        _UNRECOGNIZED_FIELD_MAPPINGS[cls] = field_name
-        return cls
-    return Register
-
-
-def RegisterCustomMessageCodec(encoder, decoder):
-    """Register a custom encoder/decoder for this message class."""
-    def Register(cls):
-        _CUSTOM_MESSAGE_CODECS[cls] = _Codec(encoder=encoder, decoder=decoder)
-        return cls
-    return Register
-
-
-def RegisterCustomFieldCodec(encoder, decoder):
-    """Register a custom encoder/decoder for this field."""
-    def Register(field):
-        _CUSTOM_FIELD_CODECS[field] = _Codec(encoder=encoder, decoder=decoder)
-        return field
-    return Register
-
-
-def RegisterFieldTypeCodec(encoder, decoder):
-    """Register a custom encoder/decoder for all fields of this type."""
-    def Register(field_type):
-        _FIELD_TYPE_CODECS[field_type] = _Codec(
-            encoder=encoder, decoder=decoder)
-        return field_type
-    return Register
-
-
-# TODO(craigcitro): Delete this function with the switch to proto2.
-def CopyProtoMessage(message):
-    codec = protojson.ProtoJson()
-    return codec.decode_message(type(message), codec.encode_message(message))
-
-
-def MessageToJson(message, include_fields=None):
-    """Convert the given message to JSON."""
-    result = _ProtoJsonApiTools.Get().encode_message(message)
-    return _IncludeFields(result, message, include_fields)
-
-
-def JsonToMessage(message_type, message):
-    """Convert the given JSON to a message of type message_type."""
-    return _ProtoJsonApiTools.Get().decode_message(message_type, message)
-
-
-# TODO(craigcitro): Do this directly, instead of via JSON.
-def DictToMessage(d, message_type):
-    """Convert the given dictionary to a message of type message_type."""
-    return JsonToMessage(message_type, json.dumps(d))
-
-
-def MessageToDict(message):
-    """Convert the given message to a dictionary."""
-    return json.loads(MessageToJson(message))
-
-
-def DictToProtoMap(properties, additional_property_type, sort_items=False):
-    """Convert the given dictionary to an AdditionalProperty message."""
-    items = properties.items()
-    if sort_items:
-        items = sorted(items)
-    map_ = []
-    for key, value in items:
-        map_.append(additional_property_type.AdditionalProperty(
-            key=key, value=value))
-    return additional_property_type(additional_properties=map_)
-
-
-def PyValueToMessage(message_type, value):
-    """Convert the given python value to a message of type message_type."""
-    return JsonToMessage(message_type, json.dumps(value))
-
-
-def MessageToPyValue(message):
-    """Convert the given message to a python value."""
-    return json.loads(MessageToJson(message))
-
-
-def MessageToRepr(msg, multiline=False, **kwargs):
-    """Return a repr-style string for a protorpc message.
-
-    protorpc.Message.__repr__ does not return anything that could be considered
-    python code. Adding this function lets us print a protorpc message in such
-    a way that it could be pasted into code later, and used to compare against
-    other things.
-
-    Args:
-      msg: protorpc.Message, the message to be repr'd.
-      multiline: bool, True if the returned string should have each field
-          assignment on its own line.
-      **kwargs: {str:str}, Additional flags for how to format the string.
-
-    Known **kwargs:
-      shortstrings: bool, True if all string values should be
-          truncated at 100 characters, since when mocking the contents
-          typically don't matter except for IDs, and IDs are usually
-          less than 100 characters.
-      no_modules: bool, True if the long module name should not be printed with
-          each type.
-
-    Returns:
-      str, A string of valid python (assuming the right imports have been made)
-      that recreates the message passed into this function.
-
-    """
-
-    # TODO(jasmuth): craigcitro suggests a pretty-printer from apitools/gen.
-
-    indent = kwargs.get('indent', 0)
-
-    def IndentKwargs(kwargs):
-        kwargs = dict(kwargs)
-        kwargs['indent'] = kwargs.get('indent', 0) + 4
-        return kwargs
-
-    if isinstance(msg, list):
-        s = '['
-        for item in msg:
-            if multiline:
-                s += '\n' + ' ' * (indent + 4)
-            s += MessageToRepr(
-                item, multiline=multiline, **IndentKwargs(kwargs)) + ','
-        if multiline:
-            s += '\n' + ' ' * indent
-        s += ']'
-        return s
-
-    if isinstance(msg, messages.Message):
-        s = type(msg).__name__ + '('
-        if not kwargs.get('no_modules'):
-            s = msg.__module__ + '.' + s
-        names = sorted([field.name for field in msg.all_fields()])
-        for name in names:
-            field = msg.field_by_name(name)
-            if multiline:
-                s += '\n' + ' ' * (indent + 4)
-            value = getattr(msg, field.name)
-            s += field.name + '=' + MessageToRepr(
-                value, multiline=multiline, **IndentKwargs(kwargs)) + ','
-        if multiline:
-            s += '\n' + ' ' * indent
-        s += ')'
-        return s
-
-    if isinstance(msg, six.string_types):
-        if kwargs.get('shortstrings') and len(msg) > 100:
-            msg = msg[:100]
-
-    if isinstance(msg, datetime.datetime):
-
-        class SpecialTZInfo(datetime.tzinfo):
-
-            def __init__(self, offset):
-                super(SpecialTZInfo, self).__init__()
-                self.offset = offset
-
-            def __repr__(self):
-                s = 'TimeZoneOffset(' + repr(self.offset) + ')'
-                if not kwargs.get('no_modules'):
-                    s = 'apitools.base.protorpclite.util.' + s
-                return s
-
-        msg = datetime.datetime(
-            msg.year, msg.month, msg.day, msg.hour, msg.minute, msg.second,
-            msg.microsecond, SpecialTZInfo(msg.tzinfo.utcoffset(0)))
-
-    return repr(msg)
-
-
-def _GetField(message, field_path):
-    for field in field_path:
-        if field not in dir(message):
-            raise KeyError('no field "%s"' % field)
-        message = getattr(message, field)
-    return message
-
-
-def _SetField(dictblob, field_path, value):
-    for field in field_path[:-1]:
-        dictblob = dictblob.setdefault(field, {})
-    dictblob[field_path[-1]] = value
-
-
-def _IncludeFields(encoded_message, message, include_fields):
-    """Add the requested fields to the encoded message."""
-    if include_fields is None:
-        return encoded_message
-    result = json.loads(encoded_message)
-    for field_name in include_fields:
-        try:
-            value = _GetField(message, field_name.split('.'))
-            nullvalue = None
-            if isinstance(value, list):
-                nullvalue = []
-        except KeyError:
-            raise exceptions.InvalidDataError(
-                'No field named %s in message of type %s' % (
-                    field_name, type(message)))
-        _SetField(result, field_name.split('.'), nullvalue)
-    return json.dumps(result)
-
-
-def _GetFieldCodecs(field, attr):
-    result = [
-        getattr(_CUSTOM_FIELD_CODECS.get(field), attr, None),
-        getattr(_FIELD_TYPE_CODECS.get(type(field)), attr, None),
-    ]
-    return [x for x in result if x is not None]
-
-
-class _ProtoJsonApiTools(protojson.ProtoJson):
-
-    """JSON encoder used by apitools clients."""
-    _INSTANCE = None
-
-    @classmethod
-    def Get(cls):
-        if cls._INSTANCE is None:
-            cls._INSTANCE = cls()
-        return cls._INSTANCE
-
-    def decode_message(self, message_type, encoded_message):
-        if message_type in _CUSTOM_MESSAGE_CODECS:
-            return _CUSTOM_MESSAGE_CODECS[
-                message_type].decoder(encoded_message)
-        result = _DecodeCustomFieldNames(message_type, encoded_message)
-        result = super(_ProtoJsonApiTools, self).decode_message(
-            message_type, result)
-        result = _ProcessUnknownEnums(result, encoded_message)
-        result = _ProcessUnknownMessages(result, encoded_message)
-        return _DecodeUnknownFields(result, encoded_message)
-
-    def decode_field(self, field, value):
-        """Decode the given JSON value.
-
-        Args:
-          field: a messages.Field for the field we're decoding.
-          value: a python value we'd like to decode.
-
-        Returns:
-          A value suitable for assignment to field.
-        """
-        for decoder in _GetFieldCodecs(field, 'decoder'):
-            result = decoder(field, value)
-            value = result.value
-            if result.complete:
-                return value
-        if isinstance(field, messages.MessageField):
-            field_value = self.decode_message(
-                field.message_type, json.dumps(value))
-        elif isinstance(field, messages.EnumField):
-            value = GetCustomJsonEnumMapping(
-                field.type, json_name=value) or value
-            try:
-                field_value = super(
-                    _ProtoJsonApiTools, self).decode_field(field, value)
-            except messages.DecodeError:
-                if not isinstance(value, six.string_types):
-                    raise
-                field_value = None
-        else:
-            field_value = super(
-                _ProtoJsonApiTools, self).decode_field(field, value)
-        return field_value
-
-    def encode_message(self, message):
-        if isinstance(message, messages.FieldList):
-            return '[%s]' % (', '.join(self.encode_message(x)
-                                       for x in message))
-
-        # pylint: disable=unidiomatic-typecheck
-        if type(message) in _CUSTOM_MESSAGE_CODECS:
-            return _CUSTOM_MESSAGE_CODECS[type(message)].encoder(message)
-
-        message = _EncodeUnknownFields(message)
-        result = super(_ProtoJsonApiTools, self).encode_message(message)
-        result = _EncodeCustomFieldNames(message, result)
-        return json.dumps(json.loads(result), sort_keys=True)
-
-    def encode_field(self, field, value):
-        """Encode the given value as JSON.
-
-        Args:
-          field: a messages.Field for the field we're encoding.
-          value: a value for field.
-
-        Returns:
-          A python value suitable for json.dumps.
-        """
-        for encoder in _GetFieldCodecs(field, 'encoder'):
-            result = encoder(field, value)
-            value = result.value
-            if result.complete:
-                return value
-        if isinstance(field, messages.EnumField):
-            if field.repeated:
-                remapped_value = [GetCustomJsonEnumMapping(
-                    field.type, python_name=e.name) or e.name for e in value]
-            else:
-                remapped_value = GetCustomJsonEnumMapping(
-                    field.type, python_name=value.name)
-            if remapped_value:
-                return remapped_value
-        if (isinstance(field, messages.MessageField) and
-                not isinstance(field, message_types.DateTimeField)):
-            value = json.loads(self.encode_message(value))
-        return super(_ProtoJsonApiTools, self).encode_field(field, value)
-
-
-# TODO(craigcitro): Fold this and _IncludeFields in as codecs.
-def _DecodeUnknownFields(message, encoded_message):
-    """Rewrite unknown fields in message into message.destination."""
-    destination = _UNRECOGNIZED_FIELD_MAPPINGS.get(type(message))
-    if destination is None:
-        return message
-    pair_field = message.field_by_name(destination)
-    if not isinstance(pair_field, messages.MessageField):
-        raise exceptions.InvalidDataFromServerError(
-            'Unrecognized fields must be mapped to a compound '
-            'message type.')
-    pair_type = pair_field.message_type
-    # TODO(craigcitro): Add more error checking around the pair
-    # type being exactly what we suspect (field names, etc).
-    if isinstance(pair_type.value, messages.MessageField):
-        new_values = _DecodeUnknownMessages(
-            message, json.loads(encoded_message), pair_type)
-    else:
-        new_values = _DecodeUnrecognizedFields(message, pair_type)
-    setattr(message, destination, new_values)
-    # We could probably get away with not setting this, but
-    # why not clear it?
-    setattr(message, '_Message__unrecognized_fields', {})
-    return message
-
-
-def _DecodeUnknownMessages(message, encoded_message, pair_type):
-    """Process unknown fields in encoded_message of a message type."""
-    field_type = pair_type.value.type
-    new_values = []
-    all_field_names = [x.name for x in message.all_fields()]
-    for name, value_dict in six.iteritems(encoded_message):
-        if name in all_field_names:
-            continue
-        value = PyValueToMessage(field_type, value_dict)
-        if pair_type.value.repeated:
-            value = _AsMessageList(value)
-        new_pair = pair_type(key=name, value=value)
-        new_values.append(new_pair)
-    return new_values
-
-
-def _DecodeUnrecognizedFields(message, pair_type):
-    """Process unrecognized fields in message."""
-    new_values = []
-    for unknown_field in message.all_unrecognized_fields():
-        # TODO(craigcitro): Consider validating the variant if
-        # the assignment below doesn't take care of it. It may
-        # also be necessary to check it in the case that the
-        # type has multiple encodings.
-        value, _ = message.get_unrecognized_field_info(unknown_field)
-        value_type = pair_type.field_by_name('value')
-        if isinstance(value_type, messages.MessageField):
-            decoded_value = DictToMessage(value, pair_type.value.message_type)
-        else:
-            decoded_value = protojson.ProtoJson().decode_field(
-                pair_type.value, value)
-        new_pair = pair_type(key=str(unknown_field), value=decoded_value)
-        new_values.append(new_pair)
-    return new_values
-
-
-def _EncodeUnknownFields(message):
-    """Remap unknown fields in message out of message.source."""
-    source = _UNRECOGNIZED_FIELD_MAPPINGS.get(type(message))
-    if source is None:
-        return message
-    result = CopyProtoMessage(message)
-    pairs_field = message.field_by_name(source)
-    if not isinstance(pairs_field, messages.MessageField):
-        raise exceptions.InvalidUserInputError(
-            'Invalid pairs field %s' % pairs_field)
-    pairs_type = pairs_field.message_type
-    value_variant = pairs_type.field_by_name('value').variant
-    pairs = getattr(message, source)
-    for pair in pairs:
-        if value_variant == messages.Variant.MESSAGE:
-            encoded_value = MessageToDict(pair.value)
-        else:
-            encoded_value = pair.value
-        result.set_unrecognized_field(pair.key, encoded_value, value_variant)
-    setattr(result, source, [])
-    return result
-
-
-def _SafeEncodeBytes(field, value):
-    """Encode the bytes in value as urlsafe base64."""
-    try:
-        if field.repeated:
-            result = [base64.urlsafe_b64encode(byte) for byte in value]
-        else:
-            result = base64.urlsafe_b64encode(value)
-        complete = True
-    except TypeError:
-        result = value
-        complete = False
-    return CodecResult(value=result, complete=complete)
-
-
-def _SafeDecodeBytes(unused_field, value):
-    """Decode the urlsafe base64 value into bytes."""
-    try:
-        result = base64.urlsafe_b64decode(str(value))
-        complete = True
-    except TypeError:
-        result = value
-        complete = False
-    return CodecResult(value=result, complete=complete)
-
-
-def _ProcessUnknownEnums(message, encoded_message):
-    """Add unknown enum values from encoded_message as unknown fields.
-
-    ProtoRPC diverges from the usual protocol buffer behavior here and
-    doesn't allow unknown fields. Throwing on unknown fields makes it
-    impossible to let servers add new enum values and stay compatible
-    with older clients, which isn't reasonable for us. We simply store
-    unrecognized enum values as unknown fields, and all is well.
-
-    Args:
-      message: Proto message we've decoded thus far.
-      encoded_message: JSON string we're decoding.
-
-    Returns:
-      message, with any unknown enums stored as unrecognized fields.
-    """
-    if not encoded_message:
-        return message
-    decoded_message = json.loads(encoded_message)
-    for field in message.all_fields():
-        if (isinstance(field, messages.EnumField) and
-                field.name in decoded_message and
-                message.get_assigned_value(field.name) is None):
-            message.set_unrecognized_field(
-                field.name, decoded_message[field.name], messages.Variant.ENUM)
-    return message
-
-
-def _ProcessUnknownMessages(message, encoded_message):
-    """Store any remaining unknown fields as strings.
-
-    ProtoRPC currently ignores unknown values for which no type can be
-    determined (and logs a "No variant found" message). For the purposes
-    of reserializing, this is quite harmful (since it throws away
-    information). Here we simply add those as unknown fields of type
-    string (so that they can easily be reserialized).
-
-    Args:
-      message: Proto message we've decoded thus far.
-      encoded_message: JSON string we're decoding.
-
-    Returns:
-      message, with any remaining unrecognized fields saved.
-    """
-    if not encoded_message:
-        return message
-    decoded_message = json.loads(encoded_message)
-    message_fields = [x.name for x in message.all_fields()] + list(
-        message.all_unrecognized_fields())
-    missing_fields = [x for x in decoded_message.keys()
-                      if x not in message_fields]
-    for field_name in missing_fields:
-        message.set_unrecognized_field(field_name, decoded_message[field_name],
-                                       messages.Variant.STRING)
-    return message
-
-
-RegisterFieldTypeCodec(_SafeEncodeBytes, _SafeDecodeBytes)(messages.BytesField)
-
-
-# Note that these could share a dictionary, since they're keyed by
-# distinct types, but it's not really worth it.
-_JSON_ENUM_MAPPINGS = {}
-_JSON_FIELD_MAPPINGS = {}
-
-
-def _GetTypeKey(message_type, package):
-    """Get the prefix for this message type in mapping dicts."""
-    key = message_type.definition_name()
-    if package and key.startswith(package + '.'):
-        module_name = message_type.__module__
-        # We normalize '__main__' to something unique, if possible.
-        if module_name == '__main__':
-            try:
-                file_name = sys.modules[module_name].__file__
-            except (AttributeError, KeyError):
-                pass
-            else:
-                base_name = os.path.basename(file_name)
-                split_name = os.path.splitext(base_name)
-                if len(split_name) == 1:
-                    module_name = unicode(base_name)
-                else:
-                    module_name = u'.'.join(split_name[:-1])
-        key = module_name + '.' + key.partition('.')[2]
-    return key
-
-
-def AddCustomJsonEnumMapping(enum_type, python_name, json_name,
-                             package=''):
-    """Add a custom wire encoding for a given enum value.
-
-    This is primarily used in generated code, to handle enum values
-    which happen to be Python keywords.
-
-    Args:
-      enum_type: (messages.Enum) An enum type
-      python_name: (basestring) Python name for this value.
-      json_name: (basestring) JSON name to be used on the wire.
-      package: (basestring, optional) Package prefix for this enum, if
-          present. We strip this off the enum name in order to generate
-          unique keys.
-    """
-    if not issubclass(enum_type, messages.Enum):
-        raise exceptions.TypecheckError(
-            'Cannot set JSON enum mapping for non-enum "%s"' % enum_type)
-    enum_name = _GetTypeKey(enum_type, package)
-    if python_name not in enum_type.names():
-        raise exceptions.InvalidDataError(
-            'Enum value %s not a value for type %s' % (python_name, enum_type))
-    field_mappings = _JSON_ENUM_MAPPINGS.setdefault(enum_name, {})
-    _CheckForExistingMappings('enum', enum_type, python_name, json_name)
-    field_mappings[python_name] = json_name
-
-
-def AddCustomJsonFieldMapping(message_type, python_name, json_name,
-                              package=''):
-    """Add a custom wire encoding for a given message field.
-
-    This is primarily used in generated code, to handle enum values
-    which happen to be Python keywords.
-
-    Args:
-      message_type: (messages.Message) A message type
-      python_name: (basestring) Python name for this value.
-      json_name: (basestring) JSON name to be used on the wire.
-      package: (basestring, optional) Package prefix for this message, if
-          present. We strip this off the message name in order to generate
-          unique keys.
-    """
-    if not issubclass(message_type, messages.Message):
-        raise exceptions.TypecheckError(
-            'Cannot set JSON field mapping for '
-            'non-message "%s"' % message_type)
-    message_name = _GetTypeKey(message_type, package)
-    try:
-        _ = message_type.field_by_name(python_name)
-    except KeyError:
-        raise exceptions.InvalidDataError(
-            'Field %s not recognized for type %s' % (
-                python_name, message_type))
-    field_mappings = _JSON_FIELD_MAPPINGS.setdefault(message_name, {})
-    _CheckForExistingMappings('field', message_type, python_name, json_name)
-    field_mappings[python_name] = json_name
-
-
-def GetCustomJsonEnumMapping(enum_type, python_name=None, json_name=None):
-    """Return the appropriate remapping for the given enum, or None."""
-    return _FetchRemapping(enum_type.definition_name(), 'enum',
-                           python_name=python_name, json_name=json_name,
-                           mappings=_JSON_ENUM_MAPPINGS)
-
-
-def GetCustomJsonFieldMapping(message_type, python_name=None, json_name=None):
-    """Return the appropriate remapping for the given field, or None."""
-    return _FetchRemapping(message_type.definition_name(), 'field',
-                           python_name=python_name, json_name=json_name,
-                           mappings=_JSON_FIELD_MAPPINGS)
-
-
-def _FetchRemapping(type_name, mapping_type, python_name=None, json_name=None,
-                    mappings=None):
-    """Common code for fetching a key or value from a remapping dict."""
-    if python_name and json_name:
-        raise exceptions.InvalidDataError(
-            'Cannot specify both python_name and json_name '
-            'for %s remapping' % mapping_type)
-    if not (python_name or json_name):
-        raise exceptions.InvalidDataError(
-            'Must specify either python_name or json_name for %s remapping' % (
-                mapping_type,))
-    field_remappings = mappings.get(type_name, {})
-    if field_remappings:
-        if python_name:
-            return field_remappings.get(python_name)
-        elif json_name:
-            if json_name in list(field_remappings.values()):
-                return [k for k in field_remappings
-                        if field_remappings[k] == json_name][0]
-    return None
-
-
-def _CheckForExistingMappings(mapping_type, message_type,
-                              python_name, json_name):
-    """Validate that no mappings exist for the given values."""
-    if mapping_type == 'field':
-        getter = GetCustomJsonFieldMapping
-    elif mapping_type == 'enum':
-        getter = GetCustomJsonEnumMapping
-    remapping = getter(message_type, python_name=python_name)
-    if remapping is not None and remapping != json_name:
-        raise exceptions.InvalidDataError(
-            'Cannot add mapping for %s "%s", already mapped to "%s"' % (
-                mapping_type, python_name, remapping))
-    remapping = getter(message_type, json_name=json_name)
-    if remapping is not None and remapping != python_name:
-        raise exceptions.InvalidDataError(
-            'Cannot add mapping for %s "%s", already mapped to "%s"' % (
-                mapping_type, json_name, remapping))
-
-
-def _EncodeCustomFieldNames(message, encoded_value):
-    message_name = type(message).definition_name()
-    field_remappings = list(_JSON_FIELD_MAPPINGS.get(message_name, {}).items())
-    if field_remappings:
-        decoded_value = json.loads(encoded_value)
-        for python_name, json_name in field_remappings:
-            if python_name in encoded_value:
-                decoded_value[json_name] = decoded_value.pop(python_name)
-        encoded_value = json.dumps(decoded_value)
-    return encoded_value
-
-
-def _DecodeCustomFieldNames(message_type, encoded_message):
-    message_name = message_type.definition_name()
-    field_remappings = _JSON_FIELD_MAPPINGS.get(message_name, {})
-    if field_remappings:
-        decoded_message = json.loads(encoded_message)
-        for python_name, json_name in list(field_remappings.items()):
-            if json_name in decoded_message:
-                decoded_message[python_name] = decoded_message.pop(json_name)
-        encoded_message = json.dumps(decoded_message)
-    return encoded_message
-
-
-def _AsMessageList(msg):
-    """Convert the provided list-as-JsonValue to a list."""
-    # This really needs to live in extra_types, but extra_types needs
-    # to import this file to be able to register codecs.
-    # TODO(craigcitro): Split out a codecs module and fix this ugly
-    # import.
-    from apitools.base.py import extra_types
-
-    def _IsRepeatedJsonValue(msg):
-        """Return True if msg is a repeated value as a JsonValue."""
-        if isinstance(msg, extra_types.JsonArray):
-            return True
-        if isinstance(msg, extra_types.JsonValue) and msg.array_value:
-            return True
-        return False
-
-    if not _IsRepeatedJsonValue(msg):
-        raise ValueError('invalid argument to _AsMessageList')
-    if isinstance(msg, extra_types.JsonValue):
-        msg = msg.array_value
-    if isinstance(msg, extra_types.JsonArray):
-        msg = msg.entries
-    return msg
diff --git a/apitools/base/py/encoding_helper.py b/apitools/base/py/encoding_helper.py
new file mode 100644
index 0000000..c962aaf
--- /dev/null
+++ b/apitools/base/py/encoding_helper.py
@@ -0,0 +1,806 @@
+#!/usr/bin/env python
+#
+# Copyright 2018 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Common code for converting proto to other formats, such as JSON."""
+
+import base64
+import collections
+import datetime
+import json
+
+import six
+
+from apitools.base.protorpclite import message_types
+from apitools.base.protorpclite import messages
+from apitools.base.protorpclite import protojson
+from apitools.base.py import exceptions
+
+
+_Codec = collections.namedtuple('_Codec', ['encoder', 'decoder'])
+CodecResult = collections.namedtuple('CodecResult', ['value', 'complete'])
+
+
+class EdgeType(object):
+    """The type of transition made by an edge."""
+    SCALAR = 1
+    REPEATED = 2
+    MAP = 3
+
+
+class ProtoEdge(collections.namedtuple('ProtoEdge',
+                                       ['type_', 'field', 'index'])):
+    """A description of a one-level transition from a message to a value.
+
+    Protobuf messages can be arbitrarily nested as fields can be defined with
+    any "message" type. This nesting property means that there are often many
+    levels of proto messages within a single message instance. This class can
+    unambiguously describe a single step from a message to some nested value.
+
+    Properties:
+      type_: EdgeType, The type of transition represented by this edge.
+      field: str, The name of the message-typed field.
+      index: Any, Additional data needed to make the transition. The semantics
+          of the "index" property change based on the value of "type_":
+            SCALAR: ignored.
+            REPEATED: a numeric index into "field"'s list.
+            MAP: a key into "field"'s mapping.
+    """
+    __slots__ = ()
+
+    def __str__(self):
+        if self.type_ == EdgeType.SCALAR:
+            return self.field
+        else:
+            return '{}[{}]'.format(self.field, self.index)
+
+
+# TODO(craigcitro): Make these non-global.
+_UNRECOGNIZED_FIELD_MAPPINGS = {}
+_CUSTOM_MESSAGE_CODECS = {}
+_CUSTOM_FIELD_CODECS = {}
+_FIELD_TYPE_CODECS = {}
+
+
+def MapUnrecognizedFields(field_name):
+    """Register field_name as a container for unrecognized fields."""
+    def Register(cls):
+        _UNRECOGNIZED_FIELD_MAPPINGS[cls] = field_name
+        return cls
+    return Register
+
+
+def RegisterCustomMessageCodec(encoder, decoder):
+    """Register a custom encoder/decoder for this message class."""
+    def Register(cls):
+        _CUSTOM_MESSAGE_CODECS[cls] = _Codec(encoder=encoder, decoder=decoder)
+        return cls
+    return Register
+
+
+def RegisterCustomFieldCodec(encoder, decoder):
+    """Register a custom encoder/decoder for this field."""
+    def Register(field):
+        _CUSTOM_FIELD_CODECS[field] = _Codec(encoder=encoder, decoder=decoder)
+        return field
+    return Register
+
+
+def RegisterFieldTypeCodec(encoder, decoder):
+    """Register a custom encoder/decoder for all fields of this type."""
+    def Register(field_type):
+        _FIELD_TYPE_CODECS[field_type] = _Codec(
+            encoder=encoder, decoder=decoder)
+        return field_type
+    return Register
+
+
+def CopyProtoMessage(message):
+    """Make a deep copy of a message."""
+    return JsonToMessage(type(message), MessageToJson(message))
+
+
+def MessageToJson(message, include_fields=None):
+    """Convert the given message to JSON."""
+    result = _ProtoJsonApiTools.Get().encode_message(message)
+    return _IncludeFields(result, message, include_fields)
+
+
+def JsonToMessage(message_type, message):
+    """Convert the given JSON to a message of type message_type."""
+    return _ProtoJsonApiTools.Get().decode_message(message_type, message)
+
+
+# TODO(craigcitro): Do this directly, instead of via JSON.
+def DictToMessage(d, message_type):
+    """Convert the given dictionary to a message of type message_type."""
+    return JsonToMessage(message_type, json.dumps(d))
+
+
+def MessageToDict(message):
+    """Convert the given message to a dictionary."""
+    return json.loads(MessageToJson(message))
+
+
+def DictToAdditionalPropertyMessage(properties, additional_property_type,
+                                    sort_items=False):
+    """Convert the given dictionary to an AdditionalProperty message."""
+    items = properties.items()
+    if sort_items:
+        items = sorted(items)
+    map_ = []
+    for key, value in items:
+        map_.append(additional_property_type.AdditionalProperty(
+            key=key, value=value))
+    return additional_property_type(additionalProperties=map_)
+
+
+def PyValueToMessage(message_type, value):
+    """Convert the given python value to a message of type message_type."""
+    return JsonToMessage(message_type, json.dumps(value))
+
+
+def MessageToPyValue(message):
+    """Convert the given message to a python value."""
+    return json.loads(MessageToJson(message))
+
+
+def MessageToRepr(msg, multiline=False, **kwargs):
+    """Return a repr-style string for a protorpc message.
+
+    protorpc.Message.__repr__ does not return anything that could be considered
+    python code. Adding this function lets us print a protorpc message in such
+    a way that it could be pasted into code later, and used to compare against
+    other things.
+
+    Args:
+      msg: protorpc.Message, the message to be repr'd.
+      multiline: bool, True if the returned string should have each field
+          assignment on its own line.
+      **kwargs: {str:str}, Additional flags for how to format the string.
+
+    Known **kwargs:
+      shortstrings: bool, True if all string values should be
+          truncated at 100 characters, since when mocking the contents
+          typically don't matter except for IDs, and IDs are usually
+          less than 100 characters.
+      no_modules: bool, True if the long module name should not be printed with
+          each type.
+
+    Returns:
+      str, A string of valid python (assuming the right imports have been made)
+      that recreates the message passed into this function.
+
+    """
+
+    # TODO(jasmuth): craigcitro suggests a pretty-printer from apitools/gen.
+
+    indent = kwargs.get('indent', 0)
+
+    def IndentKwargs(kwargs):
+        kwargs = dict(kwargs)
+        kwargs['indent'] = kwargs.get('indent', 0) + 4
+        return kwargs
+
+    if isinstance(msg, list):
+        s = '['
+        for item in msg:
+            if multiline:
+                s += '\n' + ' ' * (indent + 4)
+            s += MessageToRepr(
+                item, multiline=multiline, **IndentKwargs(kwargs)) + ','
+        if multiline:
+            s += '\n' + ' ' * indent
+        s += ']'
+        return s
+
+    if isinstance(msg, messages.Message):
+        s = type(msg).__name__ + '('
+        if not kwargs.get('no_modules'):
+            s = msg.__module__ + '.' + s
+        names = sorted([field.name for field in msg.all_fields()])
+        for name in names:
+            field = msg.field_by_name(name)
+            if multiline:
+                s += '\n' + ' ' * (indent + 4)
+            value = getattr(msg, field.name)
+            s += field.name + '=' + MessageToRepr(
+                value, multiline=multiline, **IndentKwargs(kwargs)) + ','
+        if multiline:
+            s += '\n' + ' ' * indent
+        s += ')'
+        return s
+
+    if isinstance(msg, six.string_types):
+        if kwargs.get('shortstrings') and len(msg) > 100:
+            msg = msg[:100]
+
+    if isinstance(msg, datetime.datetime):
+
+        class SpecialTZInfo(datetime.tzinfo):
+
+            def __init__(self, offset):
+                super(SpecialTZInfo, self).__init__()
+                self.offset = offset
+
+            def __repr__(self):
+                s = 'TimeZoneOffset(' + repr(self.offset) + ')'
+                if not kwargs.get('no_modules'):
+                    s = 'apitools.base.protorpclite.util.' + s
+                return s
+
+        msg = datetime.datetime(
+            msg.year, msg.month, msg.day, msg.hour, msg.minute, msg.second,
+            msg.microsecond, SpecialTZInfo(msg.tzinfo.utcoffset(0)))
+
+    return repr(msg)
+
+
+def _GetField(message, field_path):
+    for field in field_path:
+        if field not in dir(message):
+            raise KeyError('no field "%s"' % field)
+        message = getattr(message, field)
+    return message
+
+
+def _SetField(dictblob, field_path, value):
+    for field in field_path[:-1]:
+        dictblob = dictblob.setdefault(field, {})
+    dictblob[field_path[-1]] = value
+
+
+def _IncludeFields(encoded_message, message, include_fields):
+    """Add the requested fields to the encoded message."""
+    if include_fields is None:
+        return encoded_message
+    result = json.loads(encoded_message)
+    for field_name in include_fields:
+        try:
+            value = _GetField(message, field_name.split('.'))
+            nullvalue = None
+            if isinstance(value, list):
+                nullvalue = []
+        except KeyError:
+            raise exceptions.InvalidDataError(
+                'No field named %s in message of type %s' % (
+                    field_name, type(message)))
+        _SetField(result, field_name.split('.'), nullvalue)
+    return json.dumps(result)
+
+
+def _GetFieldCodecs(field, attr):
+    result = [
+        getattr(_CUSTOM_FIELD_CODECS.get(field), attr, None),
+        getattr(_FIELD_TYPE_CODECS.get(type(field)), attr, None),
+    ]
+    return [x for x in result if x is not None]
+
+
+class _ProtoJsonApiTools(protojson.ProtoJson):
+
+    """JSON encoder used by apitools clients."""
+    _INSTANCE = None
+
+    @classmethod
+    def Get(cls):
+        if cls._INSTANCE is None:
+            cls._INSTANCE = cls()
+        return cls._INSTANCE
+
+    def decode_message(self, message_type, encoded_message):
+        if message_type in _CUSTOM_MESSAGE_CODECS:
+            return _CUSTOM_MESSAGE_CODECS[
+                message_type].decoder(encoded_message)
+        result = _DecodeCustomFieldNames(message_type, encoded_message)
+        result = super(_ProtoJsonApiTools, self).decode_message(
+            message_type, result)
+        result = _ProcessUnknownEnums(result, encoded_message)
+        result = _ProcessUnknownMessages(result, encoded_message)
+        return _DecodeUnknownFields(result, encoded_message)
+
+    def decode_field(self, field, value):
+        """Decode the given JSON value.
+
+        Args:
+          field: a messages.Field for the field we're decoding.
+          value: a python value we'd like to decode.
+
+        Returns:
+          A value suitable for assignment to field.
+        """
+        for decoder in _GetFieldCodecs(field, 'decoder'):
+            result = decoder(field, value)
+            value = result.value
+            if result.complete:
+                return value
+        if isinstance(field, messages.MessageField):
+            field_value = self.decode_message(
+                field.message_type, json.dumps(value))
+        elif isinstance(field, messages.EnumField):
+            value = GetCustomJsonEnumMapping(
+                field.type, json_name=value) or value
+            try:
+                field_value = super(
+                    _ProtoJsonApiTools, self).decode_field(field, value)
+            except messages.DecodeError:
+                if not isinstance(value, six.string_types):
+                    raise
+                field_value = None
+        else:
+            field_value = super(
+                _ProtoJsonApiTools, self).decode_field(field, value)
+        return field_value
+
+    def encode_message(self, message):
+        if isinstance(message, messages.FieldList):
+            return '[%s]' % (', '.join(self.encode_message(x)
+                                       for x in message))
+
+        # pylint: disable=unidiomatic-typecheck
+        if type(message) in _CUSTOM_MESSAGE_CODECS:
+            return _CUSTOM_MESSAGE_CODECS[type(message)].encoder(message)
+
+        message = _EncodeUnknownFields(message)
+        result = super(_ProtoJsonApiTools, self).encode_message(message)
+        result = _EncodeCustomFieldNames(message, result)
+        return json.dumps(json.loads(result), sort_keys=True)
+
+    def encode_field(self, field, value):
+        """Encode the given value as JSON.
+
+        Args:
+          field: a messages.Field for the field we're encoding.
+          value: a value for field.
+
+        Returns:
+          A python value suitable for json.dumps.
+        """
+        for encoder in _GetFieldCodecs(field, 'encoder'):
+            result = encoder(field, value)
+            value = result.value
+            if result.complete:
+                return value
+        if isinstance(field, messages.EnumField):
+            if field.repeated:
+                remapped_value = [GetCustomJsonEnumMapping(
+                    field.type, python_name=e.name) or e.name for e in value]
+            else:
+                remapped_value = GetCustomJsonEnumMapping(
+                    field.type, python_name=value.name)
+            if remapped_value:
+                return remapped_value
+        if (isinstance(field, messages.MessageField) and
+                not isinstance(field, message_types.DateTimeField)):
+            value = json.loads(self.encode_message(value))
+        return super(_ProtoJsonApiTools, self).encode_field(field, value)
+
+
+# TODO(craigcitro): Fold this and _IncludeFields in as codecs.
+def _DecodeUnknownFields(message, encoded_message):
+    """Rewrite unknown fields in message into message.destination."""
+    destination = _UNRECOGNIZED_FIELD_MAPPINGS.get(type(message))
+    if destination is None:
+        return message
+    pair_field = message.field_by_name(destination)
+    if not isinstance(pair_field, messages.MessageField):
+        raise exceptions.InvalidDataFromServerError(
+            'Unrecognized fields must be mapped to a compound '
+            'message type.')
+    pair_type = pair_field.message_type
+    # TODO(craigcitro): Add more error checking around the pair
+    # type being exactly what we suspect (field names, etc).
+    if isinstance(pair_type.value, messages.MessageField):
+        new_values = _DecodeUnknownMessages(
+            message, json.loads(encoded_message), pair_type)
+    else:
+        new_values = _DecodeUnrecognizedFields(message, pair_type)
+    setattr(message, destination, new_values)
+    # We could probably get away with not setting this, but
+    # why not clear it?
+    setattr(message, '_Message__unrecognized_fields', {})
+    return message
+
+
+def _DecodeUnknownMessages(message, encoded_message, pair_type):
+    """Process unknown fields in encoded_message of a message type."""
+    field_type = pair_type.value.type
+    new_values = []
+    all_field_names = [x.name for x in message.all_fields()]
+    for name, value_dict in six.iteritems(encoded_message):
+        if name in all_field_names:
+            continue
+        value = PyValueToMessage(field_type, value_dict)
+        if pair_type.value.repeated:
+            value = _AsMessageList(value)
+        new_pair = pair_type(key=name, value=value)
+        new_values.append(new_pair)
+    return new_values
+
+
+def _DecodeUnrecognizedFields(message, pair_type):
+    """Process unrecognized fields in message."""
+    new_values = []
+    codec = _ProtoJsonApiTools.Get()
+    for unknown_field in message.all_unrecognized_fields():
+        # TODO(craigcitro): Consider validating the variant if
+        # the assignment below doesn't take care of it. It may
+        # also be necessary to check it in the case that the
+        # type has multiple encodings.
+        value, _ = message.get_unrecognized_field_info(unknown_field)
+        value_type = pair_type.field_by_name('value')
+        if isinstance(value_type, messages.MessageField):
+            decoded_value = DictToMessage(value, pair_type.value.message_type)
+        else:
+            decoded_value = codec.decode_field(
+                pair_type.value, value)
+        try:
+            new_pair_key = str(unknown_field)
+        except UnicodeEncodeError:
+            new_pair_key = protojson.ProtoJson().decode_field(
+                pair_type.key, unknown_field)
+        new_pair = pair_type(key=new_pair_key, value=decoded_value)
+        new_values.append(new_pair)
+    return new_values
+
+
+def _CopyProtoMessageVanillaProtoJson(message):
+    codec = protojson.ProtoJson()
+    return codec.decode_message(type(message), codec.encode_message(message))
+
+
+def _EncodeUnknownFields(message):
+    """Remap unknown fields in message out of message.source."""
+    source = _UNRECOGNIZED_FIELD_MAPPINGS.get(type(message))
+    if source is None:
+        return message
+    # CopyProtoMessage uses _ProtoJsonApiTools, which uses this message. Use
+    # the vanilla protojson-based copy function to avoid infinite recursion.
+    result = _CopyProtoMessageVanillaProtoJson(message)
+    pairs_field = message.field_by_name(source)
+    if not isinstance(pairs_field, messages.MessageField):
+        raise exceptions.InvalidUserInputError(
+            'Invalid pairs field %s' % pairs_field)
+    pairs_type = pairs_field.message_type
+    value_field = pairs_type.field_by_name('value')
+    value_variant = value_field.variant
+    pairs = getattr(message, source)
+    codec = _ProtoJsonApiTools.Get()
+    for pair in pairs:
+        encoded_value = codec.encode_field(value_field, pair.value)
+        result.set_unrecognized_field(pair.key, encoded_value, value_variant)
+    setattr(result, source, [])
+    return result
+
+
+def _SafeEncodeBytes(field, value):
+    """Encode the bytes in value as urlsafe base64."""
+    try:
+        if field.repeated:
+            result = [base64.urlsafe_b64encode(byte) for byte in value]
+        else:
+            result = base64.urlsafe_b64encode(value)
+        complete = True
+    except TypeError:
+        result = value
+        complete = False
+    return CodecResult(value=result, complete=complete)
+
+
+def _SafeDecodeBytes(unused_field, value):
+    """Decode the urlsafe base64 value into bytes."""
+    try:
+        result = base64.urlsafe_b64decode(str(value))
+        complete = True
+    except TypeError:
+        result = value
+        complete = False
+    return CodecResult(value=result, complete=complete)
+
+
+def _ProcessUnknownEnums(message, encoded_message):
+    """Add unknown enum values from encoded_message as unknown fields.
+
+    ProtoRPC diverges from the usual protocol buffer behavior here and
+    doesn't allow unknown fields. Throwing on unknown fields makes it
+    impossible to let servers add new enum values and stay compatible
+    with older clients, which isn't reasonable for us. We simply store
+    unrecognized enum values as unknown fields, and all is well.
+
+    Args:
+      message: Proto message we've decoded thus far.
+      encoded_message: JSON string we're decoding.
+
+    Returns:
+      message, with any unknown enums stored as unrecognized fields.
+    """
+    if not encoded_message:
+        return message
+    decoded_message = json.loads(six.ensure_str(encoded_message))
+    for field in message.all_fields():
+        if (isinstance(field, messages.EnumField) and
+                field.name in decoded_message and
+                message.get_assigned_value(field.name) is None):
+            message.set_unrecognized_field(
+                field.name, decoded_message[field.name], messages.Variant.ENUM)
+    return message
+
+
+def _ProcessUnknownMessages(message, encoded_message):
+    """Store any remaining unknown fields as strings.
+
+    ProtoRPC currently ignores unknown values for which no type can be
+    determined (and logs a "No variant found" message). For the purposes
+    of reserializing, this is quite harmful (since it throws away
+    information). Here we simply add those as unknown fields of type
+    string (so that they can easily be reserialized).
+
+    Args:
+      message: Proto message we've decoded thus far.
+      encoded_message: JSON string we're decoding.
+
+    Returns:
+      message, with any remaining unrecognized fields saved.
+    """
+    if not encoded_message:
+        return message
+    decoded_message = json.loads(six.ensure_str(encoded_message))
+    message_fields = [x.name for x in message.all_fields()] + list(
+        message.all_unrecognized_fields())
+    missing_fields = [x for x in decoded_message.keys()
+                      if x not in message_fields]
+    for field_name in missing_fields:
+        message.set_unrecognized_field(field_name, decoded_message[field_name],
+                                       messages.Variant.STRING)
+    return message
+
+
+RegisterFieldTypeCodec(_SafeEncodeBytes, _SafeDecodeBytes)(messages.BytesField)
+
+
+# Note that these could share a dictionary, since they're keyed by
+# distinct types, but it's not really worth it.
+_JSON_ENUM_MAPPINGS = {}
+_JSON_FIELD_MAPPINGS = {}
+
+
+def AddCustomJsonEnumMapping(enum_type, python_name, json_name,
+                             package=None):  # pylint: disable=unused-argument
+    """Add a custom wire encoding for a given enum value.
+
+    This is primarily used in generated code, to handle enum values
+    which happen to be Python keywords.
+
+    Args:
+      enum_type: (messages.Enum) An enum type
+      python_name: (basestring) Python name for this value.
+      json_name: (basestring) JSON name to be used on the wire.
+      package: (NoneType, optional) No effect, exists for legacy compatibility.
+    """
+    if not issubclass(enum_type, messages.Enum):
+        raise exceptions.TypecheckError(
+            'Cannot set JSON enum mapping for non-enum "%s"' % enum_type)
+    if python_name not in enum_type.names():
+        raise exceptions.InvalidDataError(
+            'Enum value %s not a value for type %s' % (python_name, enum_type))
+    field_mappings = _JSON_ENUM_MAPPINGS.setdefault(enum_type, {})
+    _CheckForExistingMappings('enum', enum_type, python_name, json_name)
+    field_mappings[python_name] = json_name
+
+
+def AddCustomJsonFieldMapping(message_type, python_name, json_name,
+                              package=None):  # pylint: disable=unused-argument
+    """Add a custom wire encoding for a given message field.
+
+    This is primarily used in generated code, to handle enum values
+    which happen to be Python keywords.
+
+    Args:
+      message_type: (messages.Message) A message type
+      python_name: (basestring) Python name for this value.
+      json_name: (basestring) JSON name to be used on the wire.
+      package: (NoneType, optional) No effect, exists for legacy compatibility.
+    """
+    if not issubclass(message_type, messages.Message):
+        raise exceptions.TypecheckError(
+            'Cannot set JSON field mapping for '
+            'non-message "%s"' % message_type)
+    try:
+        _ = message_type.field_by_name(python_name)
+    except KeyError:
+        raise exceptions.InvalidDataError(
+            'Field %s not recognized for type %s' % (
+                python_name, message_type))
+    field_mappings = _JSON_FIELD_MAPPINGS.setdefault(message_type, {})
+    _CheckForExistingMappings('field', message_type, python_name, json_name)
+    field_mappings[python_name] = json_name
+
+
+def GetCustomJsonEnumMapping(enum_type, python_name=None, json_name=None):
+    """Return the appropriate remapping for the given enum, or None."""
+    return _FetchRemapping(enum_type, 'enum',
+                           python_name=python_name, json_name=json_name,
+                           mappings=_JSON_ENUM_MAPPINGS)
+
+
+def GetCustomJsonFieldMapping(message_type, python_name=None, json_name=None):
+    """Return the appropriate remapping for the given field, or None."""
+    return _FetchRemapping(message_type, 'field',
+                           python_name=python_name, json_name=json_name,
+                           mappings=_JSON_FIELD_MAPPINGS)
+
+
+def _FetchRemapping(type_name, mapping_type, python_name=None, json_name=None,
+                    mappings=None):
+    """Common code for fetching a key or value from a remapping dict."""
+    if python_name and json_name:
+        raise exceptions.InvalidDataError(
+            'Cannot specify both python_name and json_name '
+            'for %s remapping' % mapping_type)
+    if not (python_name or json_name):
+        raise exceptions.InvalidDataError(
+            'Must specify either python_name or json_name for %s remapping' % (
+                mapping_type,))
+    field_remappings = mappings.get(type_name, {})
+    if field_remappings:
+        if python_name:
+            return field_remappings.get(python_name)
+        elif json_name:
+            if json_name in list(field_remappings.values()):
+                return [k for k in field_remappings
+                        if field_remappings[k] == json_name][0]
+    return None
+
+
+def _CheckForExistingMappings(mapping_type, message_type,
+                              python_name, json_name):
+    """Validate that no mappings exist for the given values."""
+    if mapping_type == 'field':
+        getter = GetCustomJsonFieldMapping
+    elif mapping_type == 'enum':
+        getter = GetCustomJsonEnumMapping
+    remapping = getter(message_type, python_name=python_name)
+    if remapping is not None and remapping != json_name:
+        raise exceptions.InvalidDataError(
+            'Cannot add mapping for %s "%s", already mapped to "%s"' % (
+                mapping_type, python_name, remapping))
+    remapping = getter(message_type, json_name=json_name)
+    if remapping is not None and remapping != python_name:
+        raise exceptions.InvalidDataError(
+            'Cannot add mapping for %s "%s", already mapped to "%s"' % (
+                mapping_type, json_name, remapping))
+
+
+def _EncodeCustomFieldNames(message, encoded_value):
+    field_remappings = list(_JSON_FIELD_MAPPINGS.get(type(message), {})
+                            .items())
+    if field_remappings:
+        decoded_value = json.loads(encoded_value)
+        for python_name, json_name in field_remappings:
+            if python_name in encoded_value:
+                decoded_value[json_name] = decoded_value.pop(python_name)
+        encoded_value = json.dumps(decoded_value)
+    return encoded_value
+
+
+def _DecodeCustomFieldNames(message_type, encoded_message):
+    field_remappings = _JSON_FIELD_MAPPINGS.get(message_type, {})
+    if field_remappings:
+        decoded_message = json.loads(encoded_message)
+        for python_name, json_name in list(field_remappings.items()):
+            if json_name in decoded_message:
+                decoded_message[python_name] = decoded_message.pop(json_name)
+        encoded_message = json.dumps(decoded_message)
+    return encoded_message
+
+
+def _AsMessageList(msg):
+    """Convert the provided list-as-JsonValue to a list."""
+    # This really needs to live in extra_types, but extra_types needs
+    # to import this file to be able to register codecs.
+    # TODO(craigcitro): Split out a codecs module and fix this ugly
+    # import.
+    from apitools.base.py import extra_types
+
+    def _IsRepeatedJsonValue(msg):
+        """Return True if msg is a repeated value as a JsonValue."""
+        if isinstance(msg, extra_types.JsonArray):
+            return True
+        if isinstance(msg, extra_types.JsonValue) and msg.array_value:
+            return True
+        return False
+
+    if not _IsRepeatedJsonValue(msg):
+        raise ValueError('invalid argument to _AsMessageList')
+    if isinstance(msg, extra_types.JsonValue):
+        msg = msg.array_value
+    if isinstance(msg, extra_types.JsonArray):
+        msg = msg.entries
+    return msg
+
+
+def _IsMap(message, field):
+    """Returns whether the "field" is actually a map-type."""
+    value = message.get_assigned_value(field.name)
+    if not isinstance(value, messages.Message):
+        return False
+    try:
+        additional_properties = value.field_by_name('additionalProperties')
+    except KeyError:
+        return False
+    else:
+        return additional_properties.repeated
+
+
+def _MapItems(message, field):
+    """Yields the (key, value) pair of the map values."""
+    assert _IsMap(message, field)
+    map_message = message.get_assigned_value(field.name)
+    additional_properties = map_message.get_assigned_value(
+        'additionalProperties')
+    for kv_pair in additional_properties:
+        yield kv_pair.key, kv_pair.value
+
+
+def UnrecognizedFieldIter(message, _edges=()):  # pylint: disable=invalid-name
+    """Yields the locations of unrecognized fields within "message".
+
+    If a sub-message is found to have unrecognized fields, that sub-message
+    will not be searched any further. We prune the search of the sub-message
+    because we assume it is malformed and further checks will not yield
+    productive errors.
+
+    Args:
+      message: The Message instance to search.
+      _edges: Internal arg for passing state.
+
+    Yields:
+      (edges_to_message, field_names):
+        edges_to_message: List[ProtoEdge], The edges (relative to "message")
+            describing the path to the sub-message where the unrecognized
+            fields were found.
+        field_names: List[Str], The names of the field(s) that were
+            unrecognized in the sub-message.
+    """
+    if not isinstance(message, messages.Message):
+        # This is a primitive leaf, no errors found down this path.
+        return
+
+    field_names = message.all_unrecognized_fields()
+    if field_names:
+        # This message is malformed. Stop recursing and report it.
+        yield _edges, field_names
+        return
+
+    # Recurse through all fields in the current message.
+    for field in message.all_fields():
+        value = message.get_assigned_value(field.name)
+        if field.repeated:
+            for i, item in enumerate(value):
+                repeated_edge = ProtoEdge(EdgeType.REPEATED, field.name, i)
+                iter_ = UnrecognizedFieldIter(item, _edges + (repeated_edge,))
+                for (e, y) in iter_:
+                    yield e, y
+        elif _IsMap(message, field):
+            for key, item in _MapItems(message, field):
+                map_edge = ProtoEdge(EdgeType.MAP, field.name, key)
+                iter_ = UnrecognizedFieldIter(item, _edges + (map_edge,))
+                for (e, y) in iter_:
+                    yield e, y
+        else:
+            scalar_edge = ProtoEdge(EdgeType.SCALAR, field.name, None)
+            iter_ = UnrecognizedFieldIter(value, _edges + (scalar_edge,))
+            for (e, y) in iter_:
+                yield e, y
diff --git a/apitools/base/py/encoding_test.py b/apitools/base/py/encoding_test.py
index cb6bfe5..d130cc5 100644
--- a/apitools/base/py/encoding_test.py
+++ b/apitools/base/py/encoding_test.py
@@ -1,4 +1,4 @@
-#
+# -*- coding: utf-8 -*-
 # Copyright 2015 Google Inc.
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -42,29 +42,29 @@
     timefield = message_types.DateTimeField(3)
 
 
-@encoding.MapUnrecognizedFields('additional_properties')
+@encoding.MapUnrecognizedFields('additionalProperties')
 class AdditionalPropertiesMessage(messages.Message):
 
     class AdditionalProperty(messages.Message):
         key = messages.StringField(1)
         value = messages.StringField(2)
 
-    additional_properties = messages.MessageField(
+    additionalProperties = messages.MessageField(
         'AdditionalProperty', 1, repeated=True)
 
 
-@encoding.MapUnrecognizedFields('additional_properties')
+@encoding.MapUnrecognizedFields('additionalProperties')
 class AdditionalIntPropertiesMessage(messages.Message):
 
     class AdditionalProperty(messages.Message):
         key = messages.StringField(1)
         value = messages.IntegerField(2)
 
-    additional_properties = messages.MessageField(
+    additionalProperties = messages.MessageField(
         'AdditionalProperty', 1, repeated=True)
 
 
-@encoding.MapUnrecognizedFields('additional_properties')
+@encoding.MapUnrecognizedFields('additionalProperties')
 class UnrecognizedEnumMessage(messages.Message):
 
     class ThisEnum(messages.Enum):
@@ -75,7 +75,7 @@
         key = messages.StringField(1)
         value = messages.EnumField('UnrecognizedEnumMessage.ThisEnum', 2)
 
-    additional_properties = messages.MessageField(
+    additionalProperties = messages.MessageField(
         AdditionalProperty, 1, repeated=True)
 
 
@@ -95,17 +95,96 @@
     ignored_field = messages.EnumField(ThisEnum, 3)
 
 
-@encoding.MapUnrecognizedFields('additional_properties')
+@encoding.MapUnrecognizedFields('additionalProperties')
 class AdditionalMessagePropertiesMessage(messages.Message):
 
     class AdditionalProperty(messages.Message):
         key = messages.StringField(1)
         value = messages.MessageField(CompoundPropertyType, 2)
 
-    additional_properties = messages.MessageField(
+    additionalProperties = messages.MessageField(
         'AdditionalProperty', 1, repeated=True)
 
 
+@encoding.MapUnrecognizedFields('additionalProperties')
+class MapToMessageWithEnum(messages.Message):
+
+    class AdditionalProperty(messages.Message):
+        key = messages.StringField(1)
+        value = messages.MessageField(MessageWithEnum, 2)
+
+    additionalProperties = messages.MessageField(
+        'AdditionalProperty', 1, repeated=True)
+
+
+@encoding.MapUnrecognizedFields('additionalProperties')
+class NestedAdditionalPropertiesWithEnumMessage(messages.Message):
+
+    class AdditionalProperty(messages.Message):
+        key = messages.StringField(1)
+        value = messages.MessageField(
+            MapToMessageWithEnum, 2)
+
+    additionalProperties = messages.MessageField(
+        'AdditionalProperty', 1, repeated=True)
+
+
+@encoding.MapUnrecognizedFields('additionalProperties')
+class AdditionalPropertiesWithEnumMessage(messages.Message):
+
+    class AdditionalProperty(messages.Message):
+        key = messages.StringField(1)
+        value = messages.MessageField(MessageWithEnum, 2)
+
+    additionalProperties = messages.MessageField(
+        'AdditionalProperty', 1, repeated=True)
+
+
+class NestedMapMessage(messages.Message):
+
+    msg_field = messages.MessageField(AdditionalPropertiesWithEnumMessage, 1)
+
+
+class RepeatedNestedMapMessage(messages.Message):
+
+    map_field = messages.MessageField(NestedMapMessage, 1, repeated=True)
+
+
+class NestedWithEnumMessage(messages.Message):
+
+    class ThisEnum(messages.Enum):
+        VALUE_ONE = 1
+        VALUE_TWO = 2
+
+    msg_field = messages.MessageField(MessageWithEnum, 1)
+    enum_field = messages.EnumField(ThisEnum, 2)
+
+
+class RepeatedNestedMessage(messages.Message):
+
+    msg_field = messages.MessageField(SimpleMessage, 1, repeated=True)
+
+
+@encoding.MapUnrecognizedFields('additionalProperties')
+class MapToBytesValue(messages.Message):
+    class AdditionalProperty(messages.Message):
+        key = messages.StringField(1)
+        value = messages.BytesField(2)
+
+    additionalProperties = messages.MessageField('AdditionalProperty', 1,
+                                                 repeated=True)
+
+
+@encoding.MapUnrecognizedFields('additionalProperties')
+class MapToDateTimeValue(messages.Message):
+    class AdditionalProperty(messages.Message):
+        key = messages.StringField(1)
+        value = message_types.DateTimeField(2)
+
+    additionalProperties = messages.MessageField('AdditionalProperty', 1,
+                                                 repeated=True)
+
+
 class HasNestedMessage(messages.Message):
     nested = messages.MessageField(AdditionalPropertiesMessage, 1)
     nested_list = messages.StringField(2, repeated=True)
@@ -128,15 +207,25 @@
     repeated_field = messages.StringField(5, repeated=True)
 
 
-@encoding.MapUnrecognizedFields('additional_properties')
+class MessageWithPackageAndRemappings(messages.Message):
+
+    class SomeEnum(messages.Enum):
+        enum_value = 1
+        second_value = 2
+
+    enum_field = messages.EnumField(SomeEnum, 1)
+    another_field = messages.StringField(2)
+
+
+@encoding.MapUnrecognizedFields('additionalProperties')
 class RepeatedJsonValueMessage(messages.Message):
 
     class AdditionalProperty(messages.Message):
         key = messages.StringField(1)
         value = messages.MessageField(extra_types.JsonValue, 2, repeated=True)
 
-    additional_properties = messages.MessageField('AdditionalProperty', 1,
-                                                  repeated=True)
+    additionalProperties = messages.MessageField('AdditionalProperty', 1,
+                                                 repeated=True)
 
 
 encoding.AddCustomJsonEnumMapping(MessageWithRemappings.SomeEnum,
@@ -158,6 +247,39 @@
         msg.field = 'def'
         self.assertNotEqual(msg.field, new_msg.field)
 
+    def testCopyProtoMessageInvalidEnum(self):
+        json_msg = '{"field_one": "BAD_VALUE"}'
+        orig_msg = encoding.JsonToMessage(MessageWithEnum, json_msg)
+        new_msg = encoding.CopyProtoMessage(orig_msg)
+        for msg in (orig_msg, new_msg):
+            self.assertEqual(msg.all_unrecognized_fields(), ['field_one'])
+            self.assertEqual(
+                msg.get_unrecognized_field_info('field_one',
+                                                value_default=None),
+                ('BAD_VALUE', messages.Variant.ENUM))
+
+    def testCopyProtoMessageAdditionalProperties(self):
+        msg = AdditionalPropertiesMessage(additionalProperties=[
+            AdditionalPropertiesMessage.AdditionalProperty(
+                key='key', value='value')])
+        new_msg = encoding.CopyProtoMessage(msg)
+        self.assertEqual(len(new_msg.additionalProperties), 1)
+        self.assertEqual(new_msg.additionalProperties[0].key, 'key')
+        self.assertEqual(new_msg.additionalProperties[0].value, 'value')
+
+    def testCopyProtoMessageMappingInvalidEnum(self):
+        json_msg = '{"key_one": {"field_one": "BAD_VALUE"}}'
+        orig_msg = encoding.JsonToMessage(MapToMessageWithEnum, json_msg)
+        new_msg = encoding.CopyProtoMessage(orig_msg)
+        for msg in (orig_msg, new_msg):
+            self.assertEqual(
+                msg.additionalProperties[0].value.all_unrecognized_fields(),
+                ['field_one'])
+            self.assertEqual(
+                msg.additionalProperties[0].value.get_unrecognized_field_info(
+                    'field_one', value_default=None),
+                ('BAD_VALUE', messages.Variant.ENUM))
+
     def testBytesEncoding(self):
         b64_str = 'AAc+'
         b64_msg = '{"field": "%s"}' % b64_str
@@ -177,6 +299,71 @@
             rep_msg, encoding.JsonToMessage(BytesMessage, enc_rep_msg))
         self.assertEqual(enc_rep_msg, encoding.MessageToJson(rep_msg))
 
+    def testBase64RoundtripForMapFields(self):
+        raw_data = b'\xFF\x0F\x80'
+        encoded_data = '/w+A'  # Has url-unsafe base64 characters
+        safe_encoded_data = base64.urlsafe_b64encode(raw_data).decode("utf-8")
+        self.assertEqual(raw_data, base64.b64decode(encoded_data))
+
+        # Use unsafe encoding, make sure we can load it.
+        json_data = '{"1st": "%s"}' % encoded_data
+        msg = encoding.JsonToMessage(MapToBytesValue, json_data)
+        self.assertEqual(raw_data, msg.additionalProperties[0].value)
+
+        # Now back to json and again to message
+        from_msg_json_data = encoding.MessageToJson(msg)
+        # Make sure now it is safe url encoded
+        self.assertEqual(safe_encoded_data,
+                         json.loads(from_msg_json_data)['1st'])
+        # Make sure we can also load url safe encoded bytes.
+        redone_msg = encoding.JsonToMessage(MapToBytesValue,
+                                            from_msg_json_data)
+        # Still matches
+        self.assertEqual(raw_data, redone_msg.additionalProperties[0].value)
+
+    def testBytesEncodingInAMap(self):
+        # Leading bit is 1 should not be interpreted as unicode.
+        data1 = b'\xF0\x11\x0F'
+        data2 = b'\xFF\xFF\xFF'
+
+        msg = MapToBytesValue(
+            additionalProperties=[
+                MapToBytesValue.AdditionalProperty(key='1st', value=data1),
+                MapToBytesValue.AdditionalProperty(key='2nd', value=data2)
+            ])
+
+        self.assertEqual(
+            '{"1st": "%s", "2nd": "%s"}' % (
+                base64.b64encode(data1, b'-_').decode("utf-8"),
+                base64.b64encode(data2, b'-_').decode("utf-8")),
+            encoding.MessageToJson(msg))
+
+    def testDateTimeEncodingInAMap(self):
+        msg = MapToDateTimeValue(
+            additionalProperties=[
+                MapToDateTimeValue.AdditionalProperty(
+                    key='1st',
+                    value=datetime.datetime(
+                        2014, 7, 2, 23, 33, 25, 541000,
+                        tzinfo=util.TimeZoneOffset(datetime.timedelta(0)))),
+                MapToDateTimeValue.AdditionalProperty(
+                    key='2nd',
+                    value=datetime.datetime(
+                        2015, 7, 2, 23, 33, 25, 541000,
+                        tzinfo=util.TimeZoneOffset(datetime.timedelta(0))))
+            ])
+
+        self.assertEqual(
+            '{"1st": "2014-07-02T23:33:25.541000+00:00",'
+            ' "2nd": "2015-07-02T23:33:25.541000+00:00"}',
+            encoding.MessageToJson(msg))
+
+    def testInvalidEnumEncodingInAMap(self):
+        json_msg = '{"key_one": {"field_one": "BAD_VALUE"}}'
+        msg = encoding.JsonToMessage(MapToMessageWithEnum, json_msg)
+        new_msg = encoding.MessageToJson(msg)
+        self.assertEqual('{"key_one": {"field_one": "BAD_VALUE"}}', new_msg)
+
     def testIncludeFields(self):
         msg = SimpleMessage()
         self.assertEqual('{}', encoding.MessageToJson(msg))
@@ -190,14 +377,14 @@
     def testNestedIncludeFields(self):
         msg = HasNestedMessage(
             nested=AdditionalPropertiesMessage(
-                additional_properties=[]))
+                additionalProperties=[]))
         self.assertEqual(
             '{"nested": null}',
             encoding.MessageToJson(msg, include_fields=['nested']))
         self.assertEqual(
-            '{"nested": {"additional_properties": []}}',
+            '{"nested": {"additionalProperties": []}}',
             encoding.MessageToJson(
-                msg, include_fields=['nested.additional_properties']))
+                msg, include_fields=['nested.additionalProperties']))
         msg = ExtraNestedMessage(nested=msg)
         self.assertEqual(
             '{"nested": {"nested": null}}',
@@ -209,45 +396,45 @@
             ['{"nested": {"nested": {}, "nested_list": []}}',
              '{"nested": {"nested_list": [], "nested": {}}}'])
         self.assertEqual(
-            '{"nested": {"nested": {"additional_properties": []}}}',
+            '{"nested": {"nested": {"additionalProperties": []}}}',
             encoding.MessageToJson(
-                msg, include_fields=['nested.nested.additional_properties']))
+                msg, include_fields=['nested.nested.additionalProperties']))
 
     def testAdditionalPropertyMapping(self):
         msg = AdditionalPropertiesMessage()
-        msg.additional_properties = [
+        msg.additionalProperties = [
             AdditionalPropertiesMessage.AdditionalProperty(
                 key='key_one', value='value_one'),
             AdditionalPropertiesMessage.AdditionalProperty(
-                key='key_two', value='value_two'),
+                key=u'key_twð', value='value_two'),
         ]
 
         encoded_msg = encoding.MessageToJson(msg)
         self.assertEqual(
-            {'key_one': 'value_one', 'key_two': 'value_two'},
+            {'key_one': 'value_one', u'key_twð': 'value_two'},
             json.loads(encoded_msg))
 
         new_msg = encoding.JsonToMessage(type(msg), encoded_msg)
         self.assertEqual(
-            set(('key_one', 'key_two')),
-            set([x.key for x in new_msg.additional_properties]))
+            set(('key_one', u'key_twð')),
+            set([x.key for x in new_msg.additionalProperties]))
         self.assertIsNot(msg, new_msg)
 
-        new_msg.additional_properties.pop()
-        self.assertEqual(1, len(new_msg.additional_properties))
-        self.assertEqual(2, len(msg.additional_properties))
+        new_msg.additionalProperties.pop()
+        self.assertEqual(1, len(new_msg.additionalProperties))
+        self.assertEqual(2, len(msg.additionalProperties))
 
     def testNumericPropertyName(self):
         json_msg = '{"nested": {"123": "def"}}'
         msg = encoding.JsonToMessage(HasNestedMessage, json_msg)
-        self.assertEqual(1, len(msg.nested.additional_properties))
+        self.assertEqual(1, len(msg.nested.additionalProperties))
 
     def testNumericPropertyValue(self):
         json_msg = '{"key_one": "123"}'
         msg = encoding.JsonToMessage(AdditionalIntPropertiesMessage, json_msg)
         self.assertEqual(
             AdditionalIntPropertiesMessage(
-                additional_properties=[
+                additionalProperties=[
                     AdditionalIntPropertiesMessage.AdditionalProperty(
                         key='key_one', value=123)]),
             msg)
@@ -256,20 +443,20 @@
         json_msg = '{"input": {"index": 0, "name": "output"}}'
         result = encoding.JsonToMessage(
             AdditionalMessagePropertiesMessage, json_msg)
-        self.assertEqual(1, len(result.additional_properties))
-        self.assertEqual(0, result.additional_properties[0].value.index)
+        self.assertEqual(1, len(result.additionalProperties))
+        self.assertEqual(0, result.additionalProperties[0].value.index)
 
     def testUnrecognizedEnum(self):
         json_msg = '{"input": "VALUE_ONE"}'
         result = encoding.JsonToMessage(
             UnrecognizedEnumMessage, json_msg)
-        self.assertEqual(1, len(result.additional_properties))
+        self.assertEqual(1, len(result.additionalProperties))
         self.assertEqual(UnrecognizedEnumMessage.ThisEnum.VALUE_ONE,
-                         result.additional_properties[0].value)
+                         result.additionalProperties[0].value)
 
     def testNestedFieldMapping(self):
         nested_msg = AdditionalPropertiesMessage()
-        nested_msg.additional_properties = [
+        nested_msg.additionalProperties = [
             AdditionalPropertiesMessage.AdditionalProperty(
                 key='key_one', value='value_one'),
             AdditionalPropertiesMessage.AdditionalProperty(
@@ -285,11 +472,11 @@
         new_msg = encoding.JsonToMessage(type(msg), encoded_msg)
         self.assertEqual(
             set(('key_one', 'key_two')),
-            set([x.key for x in new_msg.nested.additional_properties]))
+            set([x.key for x in new_msg.nested.additionalProperties]))
 
-        new_msg.nested.additional_properties.pop()
-        self.assertEqual(1, len(new_msg.nested.additional_properties))
-        self.assertEqual(2, len(msg.nested.additional_properties))
+        new_msg.nested.additionalProperties.pop()
+        self.assertEqual(1, len(new_msg.nested.additionalProperties))
+        self.assertEqual(2, len(msg.nested.additionalProperties))
 
     def testValidEnums(self):
         message_json = '{"field_one": "VALUE_ONE"}'
@@ -325,6 +512,14 @@
         self.assertEqual(json.loads(json_message),
                          json.loads(encoding.MessageToJson(message)))
 
+    def testUnknownEnumNestedRoundtrip(self):
+        json_with_typo = ('{"outer_key": {"key_one": {"field_one": '
+                          '"VALUE_OEN", "field_two": "VALUE_OEN"}}}')
+        msg = encoding.JsonToMessage(NestedAdditionalPropertiesWithEnumMessage,
+                                     json_with_typo)
+        self.assertEqual(json.loads(json_with_typo),
+                         json.loads(encoding.MessageToJson(msg)))
+
     def testJsonDatetime(self):
         msg = TimeMessage(timefield=datetime.datetime(
             2014, 7, 2, 23, 33, 25, 541000,
@@ -360,6 +555,48 @@
         self.assertEqual(
             msg, encoding.JsonToMessage(MessageWithRemappings, json_message))
 
+    def testFieldRemappingWithPackage(self):
+        this_module = sys.modules[__name__]
+        package_name = 'my_package'
+        try:
+            setattr(this_module, 'package', package_name)
+
+            encoding.AddCustomJsonFieldMapping(
+                MessageWithPackageAndRemappings,
+                'another_field', 'wire_field_name', package=package_name)
+
+            msg = MessageWithPackageAndRemappings(another_field='my value')
+            json_message = encoding.MessageToJson(msg)
+            self.assertEqual('{"wire_field_name": "my value"}', json_message)
+            self.assertEqual(
+                msg,
+                encoding.JsonToMessage(MessageWithPackageAndRemappings,
+                                       json_message))
+        finally:
+            delattr(this_module, 'package')
+
+    def testEnumRemappingWithPackage(self):
+        this_module = sys.modules[__name__]
+        package_name = 'my_package'
+        try:
+            setattr(this_module, 'package', package_name)
+
+            encoding.AddCustomJsonEnumMapping(
+                MessageWithPackageAndRemappings.SomeEnum,
+                'enum_value', 'other_wire_name', package=package_name)
+
+            msg = MessageWithPackageAndRemappings(
+                enum_field=MessageWithPackageAndRemappings.SomeEnum.enum_value)
+            json_message = encoding.MessageToJson(msg)
+            self.assertEqual('{"enum_field": "other_wire_name"}', json_message)
+            self.assertEqual(
+                msg,
+                encoding.JsonToMessage(MessageWithPackageAndRemappings,
+                                       json_message))
+
+        finally:
+            delattr(this_module, 'package')
+
     def testRepeatedFieldRemapping(self):
         msg = MessageWithRemappings(repeated_field=['abc', 'def'])
         json_message = encoding.MessageToJson(msg)
@@ -434,71 +671,102 @@
             'timefield=datetime.datetime(2014, 7, 2, 23, 33, 25, 541000, '
             'tzinfo=TimeZoneOffset(datetime.timedelta(0))),\n)')
 
-    def testPackageMappingsNoPackage(self):
-        this_module_name = util.get_package_for_module(__name__)
-        full_type_name = 'MessageWithEnum.ThisEnum'
-        full_key = '%s.%s' % (this_module_name, full_type_name)
-        self.assertEqual(full_key,
-                         encoding._GetTypeKey(MessageWithEnum.ThisEnum, ''))
-
-    def testPackageMappingsWithPackage(self):
-        this_module_name = util.get_package_for_module(__name__)
-        full_type_name = 'MessageWithEnum.ThisEnum'
-        full_key = '%s.%s' % (this_module_name, full_type_name)
-        this_module = sys.modules[__name__]
-        new_package = 'new_package'
-        try:
-            setattr(this_module, 'package', new_package)
-            new_key = '%s.%s' % (new_package, full_type_name)
-            self.assertEqual(
-                new_key,
-                encoding._GetTypeKey(MessageWithEnum.ThisEnum, ''))
-            self.assertEqual(
-                full_key,
-                encoding._GetTypeKey(MessageWithEnum.ThisEnum, new_package))
-        finally:
-            delattr(this_module, 'package')
-
     def testRepeatedJsonValuesAsRepeatedProperty(self):
         encoded_msg = '{"a": [{"one": 1}]}'
         msg = encoding.JsonToMessage(RepeatedJsonValueMessage, encoded_msg)
         self.assertEqual(encoded_msg, encoding.MessageToJson(msg))
 
-    def testDictToProtoMap(self):
+    def testDictToAdditionalPropertyMessage(self):
         dict_ = {'key': 'value'}
 
-        encoded_msg = encoding.DictToProtoMap(dict_,
-                                              AdditionalPropertiesMessage)
+        encoded_msg = encoding.DictToAdditionalPropertyMessage(
+            dict_, AdditionalPropertiesMessage)
         expected_msg = AdditionalPropertiesMessage()
-        expected_msg.additional_properties = [
+        expected_msg.additionalProperties = [
             AdditionalPropertiesMessage.AdditionalProperty(
                 key='key', value='value')
         ]
         self.assertEqual(encoded_msg, expected_msg)
 
-    def testDictToProtoMapSorted(self):
+    def testDictToAdditionalPropertyMessageSorted(self):
         tuples = [('key{0:02}'.format(i), 'value') for i in range(100)]
         dict_ = dict(tuples)
 
-        encoded_msg = encoding.DictToProtoMap(dict_,
-                                              AdditionalPropertiesMessage,
-                                              sort_items=True)
+        encoded_msg = encoding.DictToAdditionalPropertyMessage(
+            dict_, AdditionalPropertiesMessage, sort_items=True)
         expected_msg = AdditionalPropertiesMessage()
-        expected_msg.additional_properties = [
+        expected_msg.additionalProperties = [
             AdditionalPropertiesMessage.AdditionalProperty(
                 key=key, value=value)
             for key, value in tuples
         ]
         self.assertEqual(encoded_msg, expected_msg)
 
-    def testDictToProtoMapNumeric(self):
+    def testDictToAdditionalPropertyMessageNumeric(self):
         dict_ = {'key': 1}
 
-        encoded_msg = encoding.DictToProtoMap(dict_,
-                                              AdditionalIntPropertiesMessage)
+        encoded_msg = encoding.DictToAdditionalPropertyMessage(
+            dict_, AdditionalIntPropertiesMessage)
         expected_msg = AdditionalIntPropertiesMessage()
-        expected_msg.additional_properties = [
+        expected_msg.additionalProperties = [
             AdditionalIntPropertiesMessage.AdditionalProperty(
                 key='key', value=1)
         ]
         self.assertEqual(encoded_msg, expected_msg)
+
+    def testUnrecognizedFieldIter(self):
+        m = encoding.DictToMessage({
+            'nested': {
+                'nested': {'a': 'b'},
+                'nested_list': ['foo'],
+                'extra_field': 'foo',
+            }
+        }, ExtraNestedMessage)
+        results = list(encoding.UnrecognizedFieldIter(m))
+        self.assertEqual(1, len(results))
+        edges, fields = results[0]
+        expected_edge = encoding.ProtoEdge(
+            encoding.EdgeType.SCALAR, 'nested', None)
+        self.assertEqual((expected_edge,), edges)
+        self.assertEqual(['extra_field'], fields)
+
+    def testUnrecognizedFieldIterRepeated(self):
+        m = encoding.DictToMessage({
+            'msg_field': [
+                {'field': 'foo'},
+                {'not_a_field': 'bar'}
+            ]
+        }, RepeatedNestedMessage)
+        results = list(encoding.UnrecognizedFieldIter(m))
+        self.assertEqual(1, len(results))
+        edges, fields = results[0]
+        expected_edge = encoding.ProtoEdge(
+            encoding.EdgeType.REPEATED, 'msg_field', 1)
+        self.assertEqual((expected_edge,), edges)
+        self.assertEqual(['not_a_field'], fields)
+
+    def testUnrecognizedFieldIterNestedMap(self):
+        m = encoding.DictToMessage({
+            'map_field': [{
+                'msg_field': {
+                    'foo': {'field_one': 1},
+                    'bar': {'not_a_field': 1},
+                }
+            }]
+        }, RepeatedNestedMapMessage)
+        results = list(encoding.UnrecognizedFieldIter(m))
+        self.assertEqual(1, len(results))
+        edges, fields = results[0]
+        expected_edges = (
+            encoding.ProtoEdge(encoding.EdgeType.REPEATED, 'map_field', 0),
+            encoding.ProtoEdge(encoding.EdgeType.MAP, 'msg_field', 'bar'),
+        )
+        self.assertEqual(expected_edges, edges)
+        self.assertEqual(['not_a_field'], fields)
+
+    def testUnrecognizedFieldIterAbortAfterFirstError(self):
+        m = encoding.DictToMessage({
+            'msg_field': {'field_one': 3},
+            'enum_field': 3,
+        }, NestedWithEnumMessage)
+        self.assertEqual(1, len(list(encoding.UnrecognizedFieldIter(m))))
diff --git a/apitools/base/py/exceptions.py b/apitools/base/py/exceptions.py
index e63b893..828a1f4 100644
--- a/apitools/base/py/exceptions.py
+++ b/apitools/base/py/exceptions.py
@@ -53,7 +53,8 @@
 
     def __init__(self, response, content, url,
                  method_config=None, request=None):
-        super(HttpError, self).__init__()
+        error_message = HttpError._build_message(response, content, url)
+        super(HttpError, self).__init__(error_message)
         self.response = response
         self.content = content
         self.url = url
@@ -61,11 +62,14 @@
         self.request = request
 
     def __str__(self):
-        content = self.content
+        return HttpError._build_message(self.response, self.content, self.url)
+
+    @staticmethod
+    def _build_message(response, content, url):
         if isinstance(content, bytes):
-            content = self.content.decode('ascii', 'replace')
+            content = content.decode('ascii', 'replace')
         return 'HttpError accessing <%s>: response: <%s>, content <%s>' % (
-            self.url, self.response, content)
+            url, response, content)
 
     @property
     def status_code(self):
@@ -74,9 +78,43 @@
         return int(self.response['status'])
 
     @classmethod
-    def FromResponse(cls, http_response):
-        return cls(http_response.info, http_response.content,
-                   http_response.request_url)
+    def FromResponse(cls, http_response, **kwargs):
+        try:
+            status_code = int(http_response.info.get('status'))
+            error_cls = _HTTP_ERRORS.get(status_code, cls)
+        except ValueError:
+            error_cls = cls
+        return error_cls(http_response.info, http_response.content,
+                         http_response.request_url, **kwargs)
+
+
+class HttpBadRequestError(HttpError):
+    """HTTP 400 Bad Request."""
+
+
+class HttpUnauthorizedError(HttpError):
+    """HTTP 401 Unauthorized."""
+
+
+class HttpForbiddenError(HttpError):
+    """HTTP 403 Forbidden."""
+
+
+class HttpNotFoundError(HttpError):
+    """HTTP 404 Not Found."""
+
+
+class HttpConflictError(HttpError):
+    """HTTP 409 Conflict."""
+
+
+_HTTP_ERRORS = {
+    400: HttpBadRequestError,
+    401: HttpUnauthorizedError,
+    403: HttpForbiddenError,
+    404: HttpNotFoundError,
+    409: HttpConflictError,
+}
 
 
 class InvalidUserInputError(InvalidDataError):
@@ -143,14 +181,15 @@
 
     """The response contained a retry-after header."""
 
-    def __init__(self, response, content, url, retry_after):
-        super(RetryAfterError, self).__init__(response, content, url)
+    def __init__(self, response, content, url, retry_after, **kwargs):
+        super(RetryAfterError, self).__init__(response, content, url, **kwargs)
         self.retry_after = int(retry_after)
 
     @classmethod
-    def FromResponse(cls, http_response):
+    def FromResponse(cls, http_response, **kwargs):
         return cls(http_response.info, http_response.content,
-                   http_response.request_url, http_response.retry_after)
+                   http_response.request_url, http_response.retry_after,
+                   **kwargs)
 
 
 class BadStatusCodeError(HttpError):
diff --git a/apitools/base/py/exceptions_test.py b/apitools/base/py/exceptions_test.py
new file mode 100644
index 0000000..4937f73
--- /dev/null
+++ b/apitools/base/py/exceptions_test.py
@@ -0,0 +1,74 @@
+# Copyright 2017 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import unittest2
+
+from apitools.base.py import exceptions
+from apitools.base.py import http_wrapper
+
+
+def _MakeResponse(status_code):
+    return http_wrapper.Response(
+        info={'status': status_code}, content='{"field": "abc"}',
+        request_url='http://www.google.com')
+
+
+class HttpErrorFromResponseTest(unittest2.TestCase):
+
+    """Tests for exceptions.HttpError.FromResponse."""
+
+    def testBadRequest(self):
+        err = exceptions.HttpError.FromResponse(_MakeResponse(400))
+        self.assertIsInstance(err, exceptions.HttpError)
+        self.assertIsInstance(err, exceptions.HttpBadRequestError)
+        self.assertEquals(err.status_code, 400)
+
+    def testUnauthorized(self):
+        err = exceptions.HttpError.FromResponse(_MakeResponse(401))
+        self.assertIsInstance(err, exceptions.HttpError)
+        self.assertIsInstance(err, exceptions.HttpUnauthorizedError)
+        self.assertEquals(err.status_code, 401)
+
+    def testForbidden(self):
+        err = exceptions.HttpError.FromResponse(_MakeResponse(403))
+        self.assertIsInstance(err, exceptions.HttpError)
+        self.assertIsInstance(err, exceptions.HttpForbiddenError)
+        self.assertEquals(err.status_code, 403)
+
+    def testExceptionMessageIncludesErrorDetails(self):
+        err = exceptions.HttpError.FromResponse(_MakeResponse(403))
+        self.assertIn('403', repr(err))
+        self.assertIn('http://www.google.com', repr(err))
+        self.assertIn('{"field": "abc"}', repr(err))
+
+    def testNotFound(self):
+        err = exceptions.HttpError.FromResponse(_MakeResponse(404))
+        self.assertIsInstance(err, exceptions.HttpError)
+        self.assertIsInstance(err, exceptions.HttpNotFoundError)
+        self.assertEquals(err.status_code, 404)
+
+    def testConflict(self):
+        err = exceptions.HttpError.FromResponse(_MakeResponse(409))
+        self.assertIsInstance(err, exceptions.HttpError)
+        self.assertIsInstance(err, exceptions.HttpConflictError)
+        self.assertEquals(err.status_code, 409)
+
+    def testUnknownStatus(self):
+        err = exceptions.HttpError.FromResponse(_MakeResponse(499))
+        self.assertIsInstance(err, exceptions.HttpError)
+        self.assertEquals(err.status_code, 499)
+
+    def testMalformedStatus(self):
+        err = exceptions.HttpError.FromResponse(_MakeResponse('BAD'))
+        self.assertIsInstance(err, exceptions.HttpError)
diff --git a/apitools/base/py/extra_types.py b/apitools/base/py/extra_types.py
index 79a4900..847dc91 100644
--- a/apitools/base/py/extra_types.py
+++ b/apitools/base/py/extra_types.py
@@ -26,7 +26,7 @@
 from apitools.base.protorpclite import message_types
 from apitools.base.protorpclite import messages
 from apitools.base.protorpclite import protojson
-from apitools.base.py import encoding
+from apitools.base.py import encoding_helper as encoding
 from apitools.base.py import exceptions
 from apitools.base.py import util
 
@@ -286,6 +286,7 @@
     # Don't need to do anything special, they're decoded just fine
     return encoding.CodecResult(value=value, complete=False)
 
+
 encoding.RegisterFieldTypeCodec(_EncodeInt64Field, _DecodeInt64Field)(
     messages.IntegerField)
 
@@ -303,4 +304,5 @@
     date = datetime.datetime.strptime(value, '%Y-%m-%d').date()
     return encoding.CodecResult(value=date, complete=True)
 
+
 encoding.RegisterFieldTypeCodec(_EncodeDateField, _DecodeDateField)(DateField)
diff --git a/apitools/base/py/gzip.py b/apitools/base/py/gzip.py
new file mode 100644
index 0000000..e9eed14
--- /dev/null
+++ b/apitools/base/py/gzip.py
@@ -0,0 +1,617 @@
+# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010,
+# 2011, 2012, 2013, 2014, 2015, 2016, 2017 Python Software Foundation; All
+# Rights Reserved
+#
+# This is a backport from python 3.4 into python 2.7. Text and exclusive mode
+# support are removed as they're unsupported in 2.7. This backport patches a
+# streaming bug that exists in python 2.7.
+
+"""Functions that read and write gzipped files.
+
+The user of the file doesn't have to worry about the compression,
+but random access is not allowed."""
+
+# based on Andrew Kuchling's minigzip.py distributed with the zlib module
+
+import six
+from six.moves import builtins
+from six.moves import range
+
+import struct
+import sys
+import time
+import os
+import zlib
+import io
+
+__all__ = ["GzipFile", "open", "compress", "decompress"]
+
+FTEXT, FHCRC, FEXTRA, FNAME, FCOMMENT = 1, 2, 4, 8, 16
+
+READ, WRITE = 1, 2
+
+
+def open(filename, mode="rb", compresslevel=9):
+    """Shorthand for GzipFile(filename, mode, compresslevel).
+
+    The filename argument is required; mode defaults to 'rb'
+    and compresslevel defaults to 9.
+
+    """
+    return GzipFile(filename, mode, compresslevel)
+
+
+def write32u(output, value):
+    # The L format writes the bit pattern correctly whether signed
+    # or unsigned.
+    output.write(struct.pack("<L", value))
+
+
+class _PaddedFile(object):
+    """Minimal read-only file object that prepends a string to the contents
+    of an actual file. Shouldn't be used outside of gzip.py, as it lacks
+    essential functionality."""
+
+    def __init__(self, f, prepend=b''):
+        self._buffer = prepend
+        self._length = len(prepend)
+        self.file = f
+        self._read = 0
+
+    def read(self, size):
+        if self._read is None:
+            return self.file.read(size)
+        if self._read + size <= self._length:
+            read = self._read
+            self._read += size
+            return self._buffer[read:self._read]
+        else:
+            read = self._read
+            self._read = None
+            return self._buffer[read:] + \
+                self.file.read(size - self._length + read)
+
+    def prepend(self, prepend=b'', readprevious=False):
+        if self._read is None:
+            self._buffer = prepend
+        elif readprevious and len(prepend) <= self._read:
+            self._read -= len(prepend)
+            return
+        else:
+            self._buffer = self._buffer[self._read:] + prepend
+        self._length = len(self._buffer)
+        self._read = 0
+
+    def unused(self):
+        if self._read is None:
+            return b''
+        return self._buffer[self._read:]
+
+    def seek(self, offset, whence=0):
+        # This is only ever called with offset=whence=0
+        if whence == 1 and self._read is not None:
+            if 0 <= offset + self._read <= self._length:
+                self._read += offset
+                return
+            else:
+                offset += self._length - self._read
+        self._read = None
+        self._buffer = None
+        return self.file.seek(offset, whence)
+
+    def __getattr__(self, name):
+        return getattr(self.file, name)
+
+
+class GzipFile(io.BufferedIOBase):
+    """The GzipFile class simulates most of the methods of a file object with
+    the exception of the readinto() and truncate() methods.
+
+    This class only supports opening files in binary mode. If you need to open
+    a compressed file in text mode, use the gzip.open() function.
+
+    """
+
+    myfileobj = None
+    max_read_chunk = 10 * 1024 * 1024   # 10Mb
+
+    def __init__(self, filename=None, mode=None,
+                 compresslevel=9, fileobj=None, mtime=None):
+        """Constructor for the GzipFile class.
+
+        At least one of fileobj and filename must be given a
+        non-trivial value.
+
+        The new class instance is based on fileobj, which can be a regular
+        file, an io.BytesIO object, or any other object which simulates a file.
+        It defaults to None, in which case filename is opened to provide
+        a file object.
+
+        When fileobj is not None, the filename argument is only used to be
+        included in the gzip file header, which may includes the original
+        filename of the uncompressed file.  It defaults to the filename of
+        fileobj, if discernible; otherwise, it defaults to the empty string,
+        and in this case the original filename is not included in the header.
+
+        The mode argument can be any of 'r', 'rb', 'a', 'ab', 'w', or 'wb',
+        depending on whether the file will be read or written.  The default
+        is the mode of fileobj if discernible; otherwise, the default is 'rb'.
+        A mode of 'r' is equivalent to one of 'rb', and similarly for 'w' and
+        'wb', and 'a' and 'ab'.
+
+        The compresslevel argument is an integer from 0 to 9 controlling the
+        level of compression; 1 is fastest and produces the least compression,
+        and 9 is slowest and produces the most compression. 0 is no compression
+        at all. The default is 9.
+
+        The mtime argument is an optional numeric timestamp to be written
+        to the stream when compressing.  All gzip compressed streams
+        are required to contain a timestamp.  If omitted or None, the
+        current time is used.  This module ignores the timestamp when
+        decompressing; however, some programs, such as gunzip, make use
+        of it.  The format of the timestamp is the same as that of the
+        return value of time.time() and of the st_mtime member of the
+        object returned by os.stat().
+
+        """
+
+        if mode and ('t' in mode or 'U' in mode):
+            raise ValueError("Invalid mode: {!r}".format(mode))
+        if mode and 'b' not in mode:
+            mode += 'b'
+        if fileobj is None:
+            fileobj = self.myfileobj = builtins.open(filename, mode or 'rb')
+        if filename is None:
+            filename = getattr(fileobj, 'name', '')
+            if not isinstance(filename, six.string_types):
+                filename = ''
+        if mode is None:
+            mode = getattr(fileobj, 'mode', 'rb')
+
+        if mode.startswith('r'):
+            self.mode = READ
+            # Set flag indicating start of a new member
+            self._new_member = True
+            # Buffer data read from gzip file. extrastart is offset in
+            # stream where buffer starts. extrasize is number of
+            # bytes remaining in buffer from current stream position.
+            self.extrabuf = b""
+            self.extrasize = 0
+            self.extrastart = 0
+            self.name = filename
+            # Starts small, scales exponentially
+            self.min_readsize = 100
+            fileobj = _PaddedFile(fileobj)
+
+        elif mode.startswith(('w', 'a')):
+            self.mode = WRITE
+            self._init_write(filename)
+            self.compress = zlib.compressobj(compresslevel,
+                                             zlib.DEFLATED,
+                                             -zlib.MAX_WBITS,
+                                             zlib.DEF_MEM_LEVEL,
+                                             0)
+        else:
+            raise ValueError("Invalid mode: {!r}".format(mode))
+
+        self.fileobj = fileobj
+        self.offset = 0
+        self.mtime = mtime
+
+        if self.mode == WRITE:
+            self._write_gzip_header()
+
+    @property
+    def filename(self):
+        import warnings
+        warnings.warn("use the name attribute", DeprecationWarning, 2)
+        if self.mode == WRITE and self.name[-3:] != ".gz":
+            return self.name + ".gz"
+        return self.name
+
+    def __repr__(self):
+        fileobj = self.fileobj
+        if isinstance(fileobj, _PaddedFile):
+            fileobj = fileobj.file
+        s = repr(fileobj)
+        return '<gzip ' + s[1:-1] + ' ' + hex(id(self)) + '>'
+
+    def _check_closed(self):
+        """Raises a ValueError if the underlying file object has been closed.
+
+        """
+        if self.closed:
+            raise ValueError('I/O operation on closed file.')
+
+    def _init_write(self, filename):
+        self.name = filename
+        self.crc = zlib.crc32(b"") & 0xffffffff
+        self.size = 0
+        self.writebuf = []
+        self.bufsize = 0
+
+    def _write_gzip_header(self):
+        self.fileobj.write(b'\037\213')             # magic header
+        self.fileobj.write(b'\010')                 # compression method
+        try:
+            # RFC 1952 requires the FNAME field to be Latin-1. Do not
+            # include filenames that cannot be represented that way.
+            fname = os.path.basename(self.name)
+            if not isinstance(fname, six.binary_type):
+                fname = fname.encode('latin-1')
+            if fname.endswith(b'.gz'):
+                fname = fname[:-3]
+        except UnicodeEncodeError:
+            fname = b''
+        flags = 0
+        if fname:
+            flags = FNAME
+        self.fileobj.write(six.unichr(flags).encode('latin-1'))
+        mtime = self.mtime
+        if mtime is None:
+            mtime = time.time()
+        write32u(self.fileobj, int(mtime))
+        self.fileobj.write(b'\002')
+        self.fileobj.write(b'\377')
+        if fname:
+            self.fileobj.write(fname + b'\000')
+
+    def _init_read(self):
+        self.crc = zlib.crc32(b"") & 0xffffffff
+        self.size = 0
+
+    def _read_exact(self, n):
+        data = self.fileobj.read(n)
+        while len(data) < n:
+            b = self.fileobj.read(n - len(data))
+            if not b:
+                raise EOFError("Compressed file ended before the "
+                               "end-of-stream marker was reached")
+            data += b
+        return data
+
+    def _read_gzip_header(self):
+        magic = self.fileobj.read(2)
+        if magic == b'':
+            return False
+
+        if magic != b'\037\213':
+            raise OSError('Not a gzipped file')
+
+        method, flag, self.mtime = struct.unpack("<BBIxx", self._read_exact(8))
+        if method != 8:
+            raise OSError('Unknown compression method')
+
+        if flag & FEXTRA:
+            # Read & discard the extra field, if present
+            extra_len, = struct.unpack("<H", self._read_exact(2))
+            self._read_exact(extra_len)
+        if flag & FNAME:
+            # Read and discard a null-terminated string containing the filename
+            while True:
+                s = self.fileobj.read(1)
+                if not s or s == b'\000':
+                    break
+        if flag & FCOMMENT:
+            # Read and discard a null-terminated string containing a comment
+            while True:
+                s = self.fileobj.read(1)
+                if not s or s == b'\000':
+                    break
+        if flag & FHCRC:
+            self._read_exact(2)     # Read & discard the 16-bit header CRC
+
+        unused = self.fileobj.unused()
+        if unused:
+            uncompress = self.decompress.decompress(unused)
+            self._add_read_data(uncompress)
+        return True
+
+    def write(self, data):
+        self._check_closed()
+        if self.mode != WRITE:
+            import errno
+            raise OSError(errno.EBADF, "write() on read-only GzipFile object")
+
+        if self.fileobj is None:
+            raise ValueError("write() on closed GzipFile object")
+
+        # Convert data type if called by io.BufferedWriter.
+        if isinstance(data, memoryview):
+            data = data.tobytes()
+
+        if len(data) > 0:
+            self.fileobj.write(self.compress.compress(data))
+            self.size += len(data)
+            self.crc = zlib.crc32(data, self.crc) & 0xffffffff
+            self.offset += len(data)
+
+        return len(data)
+
+    def read(self, size=-1):
+        self._check_closed()
+        if self.mode != READ:
+            import errno
+            raise OSError(errno.EBADF, "read() on write-only GzipFile object")
+
+        if self.extrasize <= 0 and self.fileobj is None:
+            return b''
+
+        readsize = 1024
+        if size < 0:        # get the whole thing
+            while self._read(readsize):
+                readsize = min(self.max_read_chunk, readsize * 2)
+            size = self.extrasize
+        else:               # just get some more of it
+            while size > self.extrasize:
+                if not self._read(readsize):
+                    if size > self.extrasize:
+                        size = self.extrasize
+                    break
+                readsize = min(self.max_read_chunk, readsize * 2)
+
+        offset = self.offset - self.extrastart
+        chunk = self.extrabuf[offset: offset + size]
+        self.extrasize = self.extrasize - size
+
+        self.offset += size
+        return chunk
+
+    def read1(self, size=-1):
+        self._check_closed()
+        if self.mode != READ:
+            import errno
+            raise OSError(errno.EBADF, "read1() on write-only GzipFile object")
+
+        if self.extrasize <= 0 and self.fileobj is None:
+            return b''
+
+        # For certain input data, a single call to _read() may not return
+        # any data. In this case, retry until we get some data or reach EOF.
+        while self.extrasize <= 0 and self._read():
+            pass
+        if size < 0 or size > self.extrasize:
+            size = self.extrasize
+
+        offset = self.offset - self.extrastart
+        chunk = self.extrabuf[offset: offset + size]
+        self.extrasize -= size
+        self.offset += size
+        return chunk
+
+    def peek(self, n):
+        if self.mode != READ:
+            import errno
+            raise OSError(errno.EBADF, "peek() on write-only GzipFile object")
+
+        # Do not return ridiculously small buffers, for one common idiom
+        # is to call peek(1) and expect more bytes in return.
+        if n < 100:
+            n = 100
+        if self.extrasize == 0:
+            if self.fileobj is None:
+                return b''
+            # Ensure that we don't return b"" if we haven't reached EOF.
+            # 1024 is the same buffering heuristic used in read()
+            while self.extrasize == 0 and self._read(max(n, 1024)):
+                pass
+        offset = self.offset - self.extrastart
+        remaining = self.extrasize
+        assert remaining == len(self.extrabuf) - offset
+        return self.extrabuf[offset:offset + n]
+
+    def _unread(self, buf):
+        self.extrasize = len(buf) + self.extrasize
+        self.offset -= len(buf)
+
+    def _read(self, size=1024):
+        if self.fileobj is None:
+            return False
+
+        if self._new_member:
+            # If the _new_member flag is set, we have to
+            # jump to the next member, if there is one.
+            self._init_read()
+            if not self._read_gzip_header():
+                return False
+            self.decompress = zlib.decompressobj(-zlib.MAX_WBITS)
+            self._new_member = False
+
+        # Read a chunk of data from the file
+        buf = self.fileobj.read(size)
+
+        # If the EOF has been reached, flush the decompression object
+        # and mark this object as finished.
+
+        if buf == b"":
+            uncompress = self.decompress.flush()
+            # Prepend the already read bytes to the fileobj to they can be
+            # seen by _read_eof()
+            self.fileobj.prepend(self.decompress.unused_data, True)
+            self._read_eof()
+            self._add_read_data(uncompress)
+            return False
+
+        uncompress = self.decompress.decompress(buf)
+        self._add_read_data(uncompress)
+
+        if self.decompress.unused_data != b"":
+            # Ending case: we've come to the end of a member in the file,
+            # so seek back to the start of the unused data, finish up
+            # this member, and read a new gzip header.
+            # Prepend the already read bytes to the fileobj to they can be
+            # seen by _read_eof() and _read_gzip_header()
+            self.fileobj.prepend(self.decompress.unused_data, True)
+            # Check the CRC and file size, and set the flag so we read
+            # a new member on the next call
+            self._read_eof()
+            self._new_member = True
+        return True
+
+    def _add_read_data(self, data):
+        self.crc = zlib.crc32(data, self.crc) & 0xffffffff
+        offset = self.offset - self.extrastart
+        self.extrabuf = self.extrabuf[offset:] + data
+        self.extrasize = self.extrasize + len(data)
+        self.extrastart = self.offset
+        self.size = self.size + len(data)
+
+    def _read_eof(self):
+        # We've read to the end of the file
+        # We check the that the computed CRC and size of the
+        # uncompressed data matches the stored values.  Note that the size
+        # stored is the true file size mod 2**32.
+        crc32, isize = struct.unpack("<II", self._read_exact(8))
+        if crc32 != self.crc:
+            raise OSError("CRC check failed %s != %s" % (hex(crc32),
+                                                         hex(self.crc)))
+        elif isize != (self.size & 0xffffffff):
+            raise OSError("Incorrect length of data produced")
+
+        # Gzip files can be padded with zeroes and still have archives.
+        # Consume all zero bytes and set the file position to the first
+        # non-zero byte. See http://www.gzip.org/#faq8
+        c = b"\x00"
+        while c == b"\x00":
+            c = self.fileobj.read(1)
+        if c:
+            self.fileobj.prepend(c, True)
+
+    @property
+    def closed(self):
+        return self.fileobj is None
+
+    def close(self):
+        fileobj = self.fileobj
+        if fileobj is None:
+            return
+        self.fileobj = None
+        try:
+            if self.mode == WRITE:
+                fileobj.write(self.compress.flush())
+                write32u(fileobj, self.crc)
+                # self.size may exceed 2GB, or even 4GB
+                write32u(fileobj, self.size & 0xffffffff)
+        finally:
+            myfileobj = self.myfileobj
+            if myfileobj:
+                self.myfileobj = None
+                myfileobj.close()
+
+    def flush(self, zlib_mode=zlib.Z_SYNC_FLUSH):
+        self._check_closed()
+        if self.mode == WRITE:
+            # Ensure the compressor's buffer is flushed
+            self.fileobj.write(self.compress.flush(zlib_mode))
+            self.fileobj.flush()
+
+    def fileno(self):
+        """Invoke the underlying file object's fileno() method.
+
+        This will raise AttributeError if the underlying file object
+        doesn't support fileno().
+        """
+        return self.fileobj.fileno()
+
+    def rewind(self):
+        '''Return the uncompressed stream file position indicator to the
+        beginning of the file'''
+        if self.mode != READ:
+            raise OSError("Can't rewind in write mode")
+        self.fileobj.seek(0)
+        self._new_member = True
+        self.extrabuf = b""
+        self.extrasize = 0
+        self.extrastart = 0
+        self.offset = 0
+
+    def readable(self):
+        return self.mode == READ
+
+    def writable(self):
+        return self.mode == WRITE
+
+    def seekable(self):
+        return True
+
+    def seek(self, offset, whence=0):
+        if whence:
+            if whence == 1:
+                offset = self.offset + offset
+            else:
+                raise ValueError('Seek from end not supported')
+        if self.mode == WRITE:
+            if offset < self.offset:
+                raise OSError('Negative seek in write mode')
+            count = offset - self.offset
+            chunk = bytes(1024)
+            for i in range(count // 1024):
+                self.write(chunk)
+            self.write(bytes(count % 1024))
+        elif self.mode == READ:
+            if offset < self.offset:
+                # for negative seek, rewind and do positive seek
+                self.rewind()
+            count = offset - self.offset
+            for i in range(count // 1024):
+                self.read(1024)
+            self.read(count % 1024)
+
+        return self.offset
+
+    def readline(self, size=-1):
+        if size < 0:
+            # Shortcut common case - newline found in buffer.
+            offset = self.offset - self.extrastart
+            i = self.extrabuf.find(b'\n', offset) + 1
+            if i > 0:
+                self.extrasize -= i - offset
+                self.offset += i - offset
+                return self.extrabuf[offset: i]
+
+            size = sys.maxsize
+            readsize = self.min_readsize
+        else:
+            readsize = size
+        bufs = []
+        while size != 0:
+            c = self.read(readsize)
+            i = c.find(b'\n')
+
+            # We set i=size to break out of the loop under two
+            # conditions: 1) there's no newline, and the chunk is
+            # larger than size, or 2) there is a newline, but the
+            # resulting line would be longer than 'size'.
+            if (size <= i) or (i == -1 and len(c) > size):
+                i = size - 1
+
+            if i >= 0 or c == b'':
+                bufs.append(c[:i + 1])    # Add portion of last chunk
+                self._unread(c[i + 1:])   # Push back rest of chunk
+                break
+
+            # Append chunk to list, decrease 'size',
+            bufs.append(c)
+            size = size - len(c)
+            readsize = min(size, readsize * 2)
+        if readsize > self.min_readsize:
+            self.min_readsize = min(readsize, self.min_readsize * 2, 512)
+        return b''.join(bufs)  # Return resulting line
+
+
+def compress(data, compresslevel=9):
+    """Compress data in one shot and return the compressed string.
+    Optional argument is the compression level, in range of 0-9.
+    """
+    buf = io.BytesIO()
+    with GzipFile(fileobj=buf, mode='wb', compresslevel=compresslevel) as f:
+        f.write(data)
+    return buf.getvalue()
+
+
+def decompress(data):
+    """Decompress a gzip compressed string in one shot.
+    Return the decompressed string.
+    """
+    with GzipFile(fileobj=io.BytesIO(data)) as f:
+        return f.read()
diff --git a/apitools/base/py/gzip_test.py b/apitools/base/py/gzip_test.py
new file mode 100644
index 0000000..2d7d458
--- /dev/null
+++ b/apitools/base/py/gzip_test.py
@@ -0,0 +1,514 @@
+# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010,
+# 2011, 2012, 2013, 2014, 2015, 2016, 2017 Python Software Foundation; All
+# Rights Reserved
+#
+# This is a backport from python 3.4 into python 2.7. Text and exclusive mode
+# support are removed as they're unsupported in 2.7. This backport patches a
+# streaming bug that exists in python 2.7.
+
+"""Test script for the gzip module.
+"""
+
+import six
+from six.moves import range
+
+import unittest
+import os
+import io
+import struct
+from apitools.base.py import gzip
+from io import open
+
+data1 = b"""  int length=DEFAULTALLOC, err = Z_OK;
+  PyObject *RetVal;
+  int flushmode = Z_FINISH;
+  unsigned long start_total_out;
+
+"""
+
+data2 = b"""/* zlibmodule.c -- gzip-compatible data compression */
+/* See http://www.gzip.org/zlib/
+/* See http://www.winimage.com/zLibDll for Windows */
+"""
+
+
+def unlink(filename):
+    try:
+        os.unlink(filename)
+    except:
+        pass
+
+
+class UnseekableIO(io.BytesIO):
+    def seekable(self):
+        return False
+
+    def tell(self):
+        raise io.UnsupportedOperation
+
+    def seek(self, *args):
+        raise io.UnsupportedOperation
+
+
+class BaseTest(unittest.TestCase):
+    filename = "@test"
+
+    def setUp(self):
+        unlink(self.filename)
+
+    def tearDown(self):
+        unlink(self.filename)
+
+
+class TestGzip(BaseTest):
+    def write_and_read_back(self, data, mode='b'):
+        b_data = bytes(data)
+        with gzip.GzipFile(self.filename, 'w' + mode) as f:
+            l = f.write(data)
+        self.assertEqual(l, len(b_data))
+        with gzip.GzipFile(self.filename, 'r' + mode) as f:
+            self.assertEqual(f.read(), b_data)
+
+    def test_write(self):
+        with gzip.GzipFile(self.filename, 'wb') as f:
+            f.write(data1 * 50)
+
+            # Try flush and fileno.
+            f.flush()
+            f.fileno()
+            if hasattr(os, 'fsync'):
+                os.fsync(f.fileno())
+            f.close()
+
+        # Test multiple close() calls.
+        f.close()
+
+    # The following test_write_xy methods test that write accepts
+    # the corresponding bytes-like object type as input
+    # and that the data written equals bytes(xy) in all cases.
+    def test_write_memoryview(self):
+        data = memoryview(data1 * 50)
+        self.write_and_read_back(data.tobytes())
+        data = memoryview(bytes(range(256)))
+        self.write_and_read_back(data.tobytes())
+
+    def test_write_incompatible_type(self):
+        # Test that non-bytes-like types raise TypeError.
+        # Issue #21560: attempts to write incompatible types
+        # should not affect the state of the fileobject
+        with gzip.GzipFile(self.filename, 'wb') as f:
+            if six.PY2:
+                with self.assertRaises(UnicodeEncodeError):
+                    f.write(u'\xff')
+            elif six.PY3:
+                with self.assertRaises(TypeError):
+                    f.write(u'\xff')
+            with self.assertRaises(TypeError):
+                f.write([1])
+            f.write(data1)
+        with gzip.GzipFile(self.filename, 'rb') as f:
+            self.assertEqual(f.read(), data1)
+
+    def test_read(self):
+        self.test_write()
+        # Try reading.
+        with gzip.GzipFile(self.filename, 'r') as f:
+            d = f.read()
+        self.assertEqual(d, data1 * 50)
+
+    def test_read1(self):
+        self.test_write()
+        blocks = []
+        nread = 0
+        with gzip.GzipFile(self.filename, 'r') as f:
+            while True:
+                d = f.read1()
+                if not d:
+                    break
+                blocks.append(d)
+                nread += len(d)
+                # Check that position was updated correctly (see issue10791).
+                self.assertEqual(f.tell(), nread)
+        self.assertEqual(b''.join(blocks), data1 * 50)
+
+    def test_io_on_closed_object(self):
+        # Test that I/O operations on closed GzipFile objects raise a
+        # ValueError, just like the corresponding functions on file objects.
+
+        # Write to a file, open it for reading, then close it.
+        self.test_write()
+        f = gzip.GzipFile(self.filename, 'r')
+        f.close()
+        with self.assertRaises(ValueError):
+            f.read(1)
+        with self.assertRaises(ValueError):
+            f.seek(0)
+        with self.assertRaises(ValueError):
+            f.tell()
+        # Open the file for writing, then close it.
+        f = gzip.GzipFile(self.filename, 'w')
+        f.close()
+        with self.assertRaises(ValueError):
+            f.write(b'')
+        with self.assertRaises(ValueError):
+            f.flush()
+
+    def test_append(self):
+        self.test_write()
+        # Append to the previous file
+        with gzip.GzipFile(self.filename, 'ab') as f:
+            f.write(data2 * 15)
+
+        with gzip.GzipFile(self.filename, 'rb') as f:
+            d = f.read()
+        self.assertEqual(d, (data1 * 50) + (data2 * 15))
+
+    def test_many_append(self):
+        # Bug #1074261 was triggered when reading a file that contained
+        # many, many members.  Create such a file and verify that reading it
+        # works.
+        with gzip.GzipFile(self.filename, 'wb', 9) as f:
+            f.write(b'a')
+        for i in range(0, 200):
+            with gzip.GzipFile(self.filename, "ab", 9) as f:  # append
+                f.write(b'a')
+
+        # Try reading the file
+        with gzip.GzipFile(self.filename, "rb") as zgfile:
+            contents = b""
+            while 1:
+                ztxt = zgfile.read(8192)
+                contents += ztxt
+                if not ztxt:
+                    break
+        self.assertEqual(contents, b'a' * 201)
+
+    def test_buffered_reader(self):
+        # Issue #7471: a GzipFile can be wrapped in a BufferedReader for
+        # performance.
+        self.test_write()
+
+        with gzip.GzipFile(self.filename, 'rb') as f:
+            with io.BufferedReader(f) as r:
+                lines = [line for line in r]
+
+        self.assertEqual(lines, 50 * data1.splitlines(True))
+
+    def test_readline(self):
+        self.test_write()
+        # Try .readline() with varying line lengths
+
+        with gzip.GzipFile(self.filename, 'rb') as f:
+            line_length = 0
+            while 1:
+                L = f.readline(line_length)
+                if not L and line_length != 0:
+                    break
+                self.assertTrue(len(L) <= line_length)
+                line_length = (line_length + 1) % 50
+
+    def test_readlines(self):
+        self.test_write()
+        # Try .readlines()
+
+        with gzip.GzipFile(self.filename, 'rb') as f:
+            L = f.readlines()
+
+        with gzip.GzipFile(self.filename, 'rb') as f:
+            while 1:
+                L = f.readlines(150)
+                if L == []:
+                    break
+
+    def test_seek_read(self):
+        self.test_write()
+        # Try seek, read test
+
+        with gzip.GzipFile(self.filename) as f:
+            while 1:
+                oldpos = f.tell()
+                line1 = f.readline()
+                if not line1:
+                    break
+                newpos = f.tell()
+                f.seek(oldpos)  # negative seek
+                if len(line1) > 10:
+                    amount = 10
+                else:
+                    amount = len(line1)
+                line2 = f.read(amount)
+                self.assertEqual(line1[:amount], line2)
+                f.seek(newpos)  # positive seek
+
+    def test_seek_whence(self):
+        self.test_write()
+        # Try seek(whence=1), read test
+
+        with gzip.GzipFile(self.filename) as f:
+            f.read(10)
+            f.seek(10, whence=1)
+            y = f.read(10)
+        self.assertEqual(y, data1[20:30])
+
+    def test_seek_write(self):
+        # Try seek, write test
+        with gzip.GzipFile(self.filename, 'w') as f:
+            for pos in range(0, 256, 16):
+                f.seek(pos)
+                f.write(b'GZ\n')
+
+    def test_mode(self):
+        self.test_write()
+        with gzip.GzipFile(self.filename, 'r') as f:
+            self.assertEqual(f.myfileobj.mode, 'rb')
+
+    def test_1647484(self):
+        for mode in ('wb', 'rb'):
+            with gzip.GzipFile(self.filename, mode) as f:
+                self.assertTrue(hasattr(f, "name"))
+                self.assertEqual(f.name, self.filename)
+
+    def test_paddedfile_getattr(self):
+        self.test_write()
+        with gzip.GzipFile(self.filename, 'rb') as f:
+            self.assertTrue(hasattr(f.fileobj, "name"))
+            self.assertEqual(f.fileobj.name, self.filename)
+
+    def test_mtime(self):
+        mtime = 123456789
+        with gzip.GzipFile(self.filename, 'w', mtime=mtime) as fWrite:
+            fWrite.write(data1)
+        with gzip.GzipFile(self.filename) as fRead:
+            dataRead = fRead.read()
+            self.assertEqual(dataRead, data1)
+            self.assertTrue(hasattr(fRead, 'mtime'))
+            self.assertEqual(fRead.mtime, mtime)
+
+    def test_metadata(self):
+        mtime = 123456789
+
+        with gzip.GzipFile(self.filename, 'w', mtime=mtime) as fWrite:
+            fWrite.write(data1)
+
+        with open(self.filename, 'rb') as fRead:
+            # see RFC 1952: http://www.faqs.org/rfcs/rfc1952.html
+
+            idBytes = fRead.read(2)
+            self.assertEqual(idBytes, b'\x1f\x8b')  # gzip ID
+
+            cmByte = fRead.read(1)
+            self.assertEqual(cmByte, b'\x08')  # deflate
+
+            flagsByte = fRead.read(1)
+            self.assertEqual(flagsByte, b'\x08')  # only the FNAME flag is set
+
+            mtimeBytes = fRead.read(4)
+            self.assertEqual(mtimeBytes, struct.pack(
+                '<i', mtime))  # little-endian
+
+            xflByte = fRead.read(1)
+            self.assertEqual(xflByte, b'\x02')  # maximum compression
+
+            osByte = fRead.read(1)
+            self.assertEqual(osByte, b'\xff')  # OS "unknown" (OS-independent)
+
+            # Since the FNAME flag is set, the zero-terminated filename
+            # follows. RFC 1952 specifies that this is the name of the input
+            # file, if any. However, the gzip module defaults to storing the
+            # name of the output file in this field.
+            expected = self.filename.encode('Latin-1') + b'\x00'
+            nameBytes = fRead.read(len(expected))
+            self.assertEqual(nameBytes, expected)
+
+            # Since no other flags were set, the header ends here. Rather than
+            # process the compressed data, let's seek to the trailer.
+            fRead.seek(os.stat(self.filename).st_size - 8)
+
+            crc32Bytes = fRead.read(4)  # CRC32 of uncompressed data [data1]
+            self.assertEqual(crc32Bytes, b'\xaf\xd7d\x83')
+
+            isizeBytes = fRead.read(4)
+            self.assertEqual(isizeBytes, struct.pack('<i', len(data1)))
+
+    def test_with_open(self):
+        # GzipFile supports the context management protocol
+        with gzip.GzipFile(self.filename, "wb") as f:
+            f.write(b"xxx")
+        f = gzip.GzipFile(self.filename, "rb")
+        f.close()
+        try:
+            with f:
+                pass
+        except ValueError:
+            pass
+        else:
+            self.fail("__enter__ on a closed file didn't raise an exception")
+        try:
+            with gzip.GzipFile(self.filename, "wb") as f:
+                1 / 0
+        except ZeroDivisionError:
+            pass
+        else:
+            self.fail("1/0 didn't raise an exception")
+
+    def test_zero_padded_file(self):
+        with gzip.GzipFile(self.filename, "wb") as f:
+            f.write(data1 * 50)
+
+        # Pad the file with zeroes
+        with open(self.filename, "ab") as f:
+            f.write(b"\x00" * 50)
+
+        with gzip.GzipFile(self.filename, "rb") as f:
+            d = f.read()
+            self.assertEqual(d, data1 * 50, "Incorrect data in file")
+
+    def test_non_seekable_file(self):
+        uncompressed = data1 * 50
+        buf = UnseekableIO()
+        with gzip.GzipFile(fileobj=buf, mode="wb") as f:
+            f.write(uncompressed)
+        compressed = buf.getvalue()
+        buf = UnseekableIO(compressed)
+        with gzip.GzipFile(fileobj=buf, mode="rb") as f:
+            self.assertEqual(f.read(), uncompressed)
+
+    def test_peek(self):
+        uncompressed = data1 * 200
+        with gzip.GzipFile(self.filename, "wb") as f:
+            f.write(uncompressed)
+
+        def sizes():
+            while True:
+                for n in range(5, 50, 10):
+                    yield n
+
+        with gzip.GzipFile(self.filename, "rb") as f:
+            f.max_read_chunk = 33
+            nread = 0
+            for n in sizes():
+                s = f.peek(n)
+                if s == b'':
+                    break
+                self.assertEqual(f.read(len(s)), s)
+                nread += len(s)
+            self.assertEqual(f.read(100), b'')
+            self.assertEqual(nread, len(uncompressed))
+
+    def test_textio_readlines(self):
+        # Issue #10791: TextIOWrapper.readlines() fails when wrapping GzipFile.
+        lines = (data1 * 50).decode("ascii").splitlines(True)
+        self.test_write()
+        with gzip.GzipFile(self.filename, 'r') as f:
+            with io.TextIOWrapper(f, encoding="ascii") as t:
+                self.assertEqual(t.readlines(), lines)
+
+    def test_fileobj_from_fdopen(self):
+        # Issue #13781: Opening a GzipFile for writing fails when using a
+        # fileobj created with os.fdopen().
+        fd = os.open(self.filename, os.O_WRONLY | os.O_CREAT)
+        with os.fdopen(fd, "wb") as f:
+            with gzip.GzipFile(fileobj=f, mode="w") as g:
+                pass
+
+    def test_bytes_filename(self):
+        str_filename = self.filename
+        try:
+            bytes_filename = str_filename.encode("ascii")
+        except UnicodeEncodeError:
+            self.skipTest("Temporary file name needs to be ASCII")
+        with gzip.GzipFile(bytes_filename, "wb") as f:
+            f.write(data1 * 50)
+        with gzip.GzipFile(bytes_filename, "rb") as f:
+            self.assertEqual(f.read(), data1 * 50)
+        # Sanity check that we are actually operating on the right file.
+        with gzip.GzipFile(str_filename, "rb") as f:
+            self.assertEqual(f.read(), data1 * 50)
+
+    # Testing compress/decompress shortcut functions
+
+    def test_compress(self):
+        for data in [data1, data2]:
+            for args in [(), (1,), (6,), (9,)]:
+                datac = gzip.compress(data, *args)
+                self.assertEqual(type(datac), bytes)
+                with gzip.GzipFile(fileobj=io.BytesIO(datac), mode="rb") as f:
+                    self.assertEqual(f.read(), data)
+
+    def test_decompress(self):
+        for data in (data1, data2):
+            buf = io.BytesIO()
+            with gzip.GzipFile(fileobj=buf, mode="wb") as f:
+                f.write(data)
+            self.assertEqual(gzip.decompress(buf.getvalue()), data)
+            # Roundtrip with compress
+            datac = gzip.compress(data)
+            self.assertEqual(gzip.decompress(datac), data)
+
+    def test_read_truncated(self):
+        data = data1 * 50
+        # Drop the CRC (4 bytes) and file size (4 bytes).
+        truncated = gzip.compress(data)[:-8]
+        with gzip.GzipFile(fileobj=io.BytesIO(truncated)) as f:
+            self.assertRaises(EOFError, f.read)
+        with gzip.GzipFile(fileobj=io.BytesIO(truncated)) as f:
+            self.assertEqual(f.read(len(data)), data)
+            self.assertRaises(EOFError, f.read, 1)
+        # Incomplete 10-byte header.
+        for i in range(2, 10):
+            with gzip.GzipFile(fileobj=io.BytesIO(truncated[:i])) as f:
+                self.assertRaises(EOFError, f.read, 1)
+
+    def test_read_with_extra(self):
+        # Gzip data with an extra field
+        gzdata = (b'\x1f\x8b\x08\x04\xb2\x17cQ\x02\xff'
+                  b'\x05\x00Extra'
+                  b'\x0bI-.\x01\x002\xd1Mx\x04\x00\x00\x00')
+        with gzip.GzipFile(fileobj=io.BytesIO(gzdata)) as f:
+            self.assertEqual(f.read(), b'Test')
+
+    def test_prepend_error(self):
+        # See issue #20875
+        with gzip.open(self.filename, "wb") as f:
+            f.write(data1)
+        with gzip.open(self.filename, "rb") as f:
+            f.fileobj.prepend()
+
+
+class TestOpen(BaseTest):
+    def test_binary_modes(self):
+        uncompressed = data1 * 50
+
+        with gzip.open(self.filename, "wb") as f:
+            f.write(uncompressed)
+        with open(self.filename, "rb") as f:
+            file_data = gzip.decompress(f.read())
+            self.assertEqual(file_data, uncompressed)
+
+        with gzip.open(self.filename, "rb") as f:
+            self.assertEqual(f.read(), uncompressed)
+
+        with gzip.open(self.filename, "ab") as f:
+            f.write(uncompressed)
+        with open(self.filename, "rb") as f:
+            file_data = gzip.decompress(f.read())
+            self.assertEqual(file_data, uncompressed * 2)
+
+    def test_implicit_binary_modes(self):
+        # Test implicit binary modes (no "b" or "t" in mode string).
+        uncompressed = data1 * 50
+
+        with gzip.open(self.filename, "w") as f:
+            f.write(uncompressed)
+        with open(self.filename, "rb") as f:
+            file_data = gzip.decompress(f.read())
+            self.assertEqual(file_data, uncompressed)
+
+        with gzip.open(self.filename, "r") as f:
+            self.assertEqual(f.read(), uncompressed)
+
+        with gzip.open(self.filename, "a") as f:
+            f.write(uncompressed)
+        with open(self.filename, "rb") as f:
+            file_data = gzip.decompress(f.read())
+            self.assertEqual(file_data, uncompressed * 2)
diff --git a/apitools/base/py/http_wrapper.py b/apitools/base/py/http_wrapper.py
index 7baf09f..a3fe65c 100644
--- a/apitools/base/py/http_wrapper.py
+++ b/apitools/base/py/http_wrapper.py
@@ -27,7 +27,6 @@
 import time
 
 import httplib2
-import oauth2client
 import six
 from six.moves import http_client
 from six.moves.urllib import parse
@@ -35,6 +34,12 @@
 from apitools.base.py import exceptions
 from apitools.base.py import util
 
+# pylint: disable=ungrouped-imports
+try:
+    from oauth2client.client import HttpAccessTokenRefreshError as TokenRefreshError  # noqa
+except ImportError:
+    from oauth2client.client import AccessTokenRefreshError as TokenRefreshError  # noqa
+
 __all__ = [
     'CheckResponse',
     'GetHttp',
@@ -279,8 +284,8 @@
         # oauth2client, need to handle it here.
         logging.debug('Response content was invalid (%s), retrying',
                       retry_args.exc)
-    elif (isinstance(retry_args.exc,
-                     oauth2client.client.HttpAccessTokenRefreshError) and
+    elif (isinstance(retry_args.exc, TokenRefreshError) and
+          hasattr(retry_args.exc, 'status') and
           (retry_args.exc.status == TOO_MANY_REQUESTS or
            retry_args.exc.status >= 500)):
         logging.debug(
@@ -296,7 +301,7 @@
         logging.debug('Response returned a retry-after header, retrying')
         retry_after = retry_args.exc.retry_after
     else:
-        raise  # pylint: disable=misplaced-bare-raise
+        raise retry_args.exc
     RebuildHttpConnections(retry_args.http)
     logging.debug('Retrying request to url %s after exception %s',
                   retry_args.http_request.url, retry_args.exc)
diff --git a/apitools/base/py/http_wrapper_test.py b/apitools/base/py/http_wrapper_test.py
index 5df107f..ce4c03e 100644
--- a/apitools/base/py/http_wrapper_test.py
+++ b/apitools/base/py/http_wrapper_test.py
@@ -17,7 +17,6 @@
 import socket
 
 import httplib2
-import oauth2client
 from six.moves import http_client
 import unittest2
 
@@ -26,6 +25,15 @@
 from apitools.base.py import exceptions
 from apitools.base.py import http_wrapper
 
+# pylint: disable=ungrouped-imports
+try:
+    from oauth2client.client import HttpAccessTokenRefreshError
+    from oauth2client.client import AccessTokenRefreshError
+    _TOKEN_REFRESH_STATUS_AVAILABLE = True
+except ImportError:
+    from oauth2client.client import AccessTokenRefreshError
+    _TOKEN_REFRESH_STATUS_AVAILABLE = False
+
 
 class _MockHttpRequest(object):
 
@@ -57,6 +65,51 @@
     def testRequestBodyWithLen(self):
         http_wrapper.Request(body='burrito')
 
+    @unittest2.skipIf(not _TOKEN_REFRESH_STATUS_AVAILABLE,
+                      'oauth2client<1.5 lacks HttpAccessTokenRefreshError.')
+    def testExceptionHandlerHttpAccessTokenError(self):
+        exception_arg = HttpAccessTokenRefreshError(status=503)
+        retry_args = http_wrapper.ExceptionRetryArgs(
+            http={'connections': {}}, http_request=_MockHttpRequest(),
+            exc=exception_arg, num_retries=0, max_retry_wait=0,
+            total_wait_sec=0)
+
+        # Disable time.sleep for this handler as it is called with
+        # a minimum value of 1 second.
+        with patch('time.sleep', return_value=None):
+            http_wrapper.HandleExceptionsAndRebuildHttpConnections(
+                retry_args)
+
+    @unittest2.skipIf(not _TOKEN_REFRESH_STATUS_AVAILABLE,
+                      'oauth2client<1.5 lacks HttpAccessTokenRefreshError.')
+    def testExceptionHandlerHttpAccessTokenErrorRaises(self):
+        exception_arg = HttpAccessTokenRefreshError(status=200)
+        retry_args = http_wrapper.ExceptionRetryArgs(
+            http={'connections': {}}, http_request=_MockHttpRequest(),
+            exc=exception_arg, num_retries=0, max_retry_wait=0,
+            total_wait_sec=0)
+
+        # Disable time.sleep for this handler as it is called with
+        # a minimum value of 1 second.
+        with self.assertRaises(HttpAccessTokenRefreshError):
+            with patch('time.sleep', return_value=None):
+                http_wrapper.HandleExceptionsAndRebuildHttpConnections(
+                    retry_args)
+
+    def testExceptionHandlerAccessTokenErrorRaises(self):
+        exception_arg = AccessTokenRefreshError()
+        retry_args = http_wrapper.ExceptionRetryArgs(
+            http={'connections': {}}, http_request=_MockHttpRequest(),
+            exc=exception_arg, num_retries=0, max_retry_wait=0,
+            total_wait_sec=0)
+
+        # Disable time.sleep for this handler as it is called with
+        # a minimum value of 1 second.
+        with self.assertRaises(AccessTokenRefreshError):
+            with patch('time.sleep', return_value=None):
+                http_wrapper.HandleExceptionsAndRebuildHttpConnections(
+                    retry_args)
+
     def testDefaultExceptionHandler(self):
         """Ensures exception handles swallows (retries)"""
         mock_http_content = 'content'.encode('utf8')
@@ -68,7 +121,6 @@
                 socket.gaierror(),
                 httplib2.ServerNotFoundError(),
                 ValueError(),
-                oauth2client.client.HttpAccessTokenRefreshError(status=503),
                 exceptions.RequestError(),
                 exceptions.BadStatusCodeError(
                     {'status': 503}, mock_http_content, 'url'),
diff --git a/apitools/base/py/list_pager.py b/apitools/base/py/list_pager.py
index 13b1cba..fb14c14 100644
--- a/apitools/base/py/list_pager.py
+++ b/apitools/base/py/list_pager.py
@@ -59,10 +59,20 @@
 
     """
     request = encoding.CopyProtoMessage(request)
-    if batch_size_attribute:
-        setattr(request, batch_size_attribute, batch_size)
     setattr(request, current_token_attribute, None)
     while limit is None or limit:
+        if batch_size_attribute:
+            # On Py3, None is not comparable so min() below will fail.
+            # On Py2, None is always less than any number so if batch_size
+            # is None, the request_batch_size will always be None regardless
+            # of the value of limit. This doesn't generally strike me as the
+            # correct behavior, but this change preserves the existing Py2
+            # behavior on Py3.
+            if batch_size is None:
+                request_batch_size = None
+            else:
+                request_batch_size = min(batch_size, limit or batch_size)
+            setattr(request, batch_size_attribute, request_batch_size)
         response = getattr(service, method)(request,
                                             global_params=global_params)
         items = getattr(response, field)
diff --git a/apitools/base/py/list_pager_test.py b/apitools/base/py/list_pager_test.py
index 3aafede..32dfea6 100644
--- a/apitools/base/py/list_pager_test.py
+++ b/apitools/base/py/list_pager_test.py
@@ -88,7 +88,7 @@
     def testYieldFromListPartial(self):
         self.mocked_client.column.List.Expect(
             messages.FusiontablesColumnListRequest(
-                maxResults=100,
+                maxResults=6,
                 pageToken=None,
                 tableId='mytable',
             ),
@@ -103,7 +103,7 @@
             ))
         self.mocked_client.column.List.Expect(
             messages.FusiontablesColumnListRequest(
-                maxResults=100,
+                maxResults=2,
                 pageToken='x',
                 tableId='mytable',
             ),
@@ -122,10 +122,80 @@
 
         self._AssertInstanceSequence(results, 6)
 
+    def testYieldFromListPaging(self):
+        self.mocked_client.column.List.Expect(
+            messages.FusiontablesColumnListRequest(
+                maxResults=5,
+                pageToken=None,
+                tableId='mytable',
+            ),
+            messages.ColumnList(
+                items=[
+                    messages.Column(name='c0'),
+                    messages.Column(name='c1'),
+                    messages.Column(name='c2'),
+                    messages.Column(name='c3'),
+                    messages.Column(name='c4'),
+                ],
+                nextPageToken='x',
+            ))
+        self.mocked_client.column.List.Expect(
+            messages.FusiontablesColumnListRequest(
+                maxResults=4,
+                pageToken='x',
+                tableId='mytable',
+            ),
+            messages.ColumnList(
+                items=[
+                    messages.Column(name='c5'),
+                    messages.Column(name='c6'),
+                    messages.Column(name='c7'),
+                    messages.Column(name='c8'),
+                ],
+            ))
+
+        client = fusiontables.FusiontablesV1(get_credentials=False)
+        request = messages.FusiontablesColumnListRequest(tableId='mytable')
+        results = list_pager.YieldFromList(client.column,
+                                           request,
+                                           limit=9,
+                                           batch_size=5)
+
+        self._AssertInstanceSequence(results, 9)
+
+    def testYieldFromListBatchSizeNone(self):
+        self.mocked_client.column.List.Expect(
+            messages.FusiontablesColumnListRequest(
+                maxResults=None,
+                pageToken=None,
+                tableId='mytable',
+            ),
+            messages.ColumnList(
+                items=[
+                    messages.Column(name='c0'),
+                    messages.Column(name='c1'),
+                    messages.Column(name='c2'),
+                    messages.Column(name='c3'),
+                    messages.Column(name='c4'),
+                    messages.Column(name='c5'),
+                    messages.Column(name='c6'),
+                ],
+                nextPageToken='x',
+            ))
+
+        client = fusiontables.FusiontablesV1(get_credentials=False)
+        request = messages.FusiontablesColumnListRequest(tableId='mytable')
+        results = list_pager.YieldFromList(client.column,
+                                           request,
+                                           limit=5,
+                                           batch_size=None)
+
+        self._AssertInstanceSequence(results, 5)
+
     def testYieldFromListEmpty(self):
         self.mocked_client.column.List.Expect(
             messages.FusiontablesColumnListRequest(
-                maxResults=100,
+                maxResults=6,
                 pageToken=None,
                 tableId='mytable',
             ),
diff --git a/apitools/base/py/testing/mock.py b/apitools/base/py/testing/mock.py
index 89adca5..3bd38ba 100644
--- a/apitools/base/py/testing/mock.py
+++ b/apitools/base/py/testing/mock.py
@@ -191,8 +191,40 @@
         self.__mocked_client = mocked_client
         self.__real_method = real_method
         self.method_config = real_method.method_config
+        config = self.method_config()
+        self.__request_type = getattr(self.__mocked_client.MESSAGES_MODULE,
+                                      config.request_type_name)
+        self.__response_type = getattr(self.__mocked_client.MESSAGES_MODULE,
+                                       config.response_type_name)
 
-    def Expect(self, request, response=None, exception=None, **unused_kwargs):
+    def _TypeCheck(self, msg, is_request):
+        """Ensure the given message is of the expected type for this method.
+
+        Args:
+          msg: The message instance to check.
+          is_request: True to validate against the expected request type,
+             False to validate against the expected response type.
+
+        Raises:
+          exceptions.ConfigurationValueError: If the type of the message was
+             not correct.
+        """
+        if is_request:
+            mode = 'request'
+            real_type = self.__request_type
+        else:
+            mode = 'response'
+            real_type = self.__response_type
+
+        if not isinstance(msg, real_type):
+            raise exceptions.ConfigurationValueError(
+                'Expected {} is not of the correct type for method [{}].\n'
+                '   Required: [{}]\n'
+                '   Given:    [{}]'.format(
+                    mode, self.__key, real_type, type(msg)))
+
+    def Expect(self, request, response=None, exception=None,
+               enable_type_checking=True, **unused_kwargs):
         """Add an expectation on the mocked method.
 
         Exactly one of response and exception should be specified.
@@ -202,12 +234,21 @@
           response: The response that should be returned or None if
               exception is provided.
           exception: An exception that should be thrown, or None.
-
+          enable_type_checking: When true, the message type of the request
+              and response (if provided) will be checked against the types
+              required by this method.
         """
         # TODO(jasmuth): the unused_kwargs provides a placeholder for
         # future things that can be passed to Expect(), like special
         # params to the method call.
 
+        # Ensure that the registered request and response mocks actually
+        # match what this method accepts and returns.
+        if enable_type_checking:
+            self._TypeCheck(request, is_request=True)
+            if response:
+                self._TypeCheck(response, is_request=False)
+
         # pylint: disable=protected-access
         # Class in same module.
         self.__mocked_client._request_responses.append(
diff --git a/apitools/base/py/testing/mock_test.py b/apitools/base/py/testing/mock_test.py
index d295f21..4afdf7b 100644
--- a/apitools/base/py/testing/mock_test.py
+++ b/apitools/base/py/testing/mock_test.py
@@ -21,7 +21,8 @@
 
 from apitools.base.protorpclite import messages
 
-import apitools.base.py as apitools_base
+from apitools.base.py import base_api
+from apitools.base.py import exceptions
 from apitools.base.py.testing import mock
 from samples.fusiontables_sample.fusiontables_v1 import \
     fusiontables_v1_client as fusiontables
@@ -34,7 +35,7 @@
         (name, potential_service)
         for name, potential_service in six.iteritems(api_client_class.__dict__)
         if (isinstance(potential_service, type) and
-            issubclass(potential_service, apitools_base.BaseApiService)))
+            issubclass(potential_service, base_api.BaseApiService)))
 
 
 class CustomException(Exception):
@@ -45,7 +46,8 @@
 
     def testMockFusionBasic(self):
         with mock.Client(fusiontables.FusiontablesV1) as client_class:
-            client_class.column.List.Expect(request=1, response=2)
+            client_class.column.List.Expect(
+                request=1, response=2, enable_type_checking=False)
             client = fusiontables.FusiontablesV1(get_credentials=False)
             self.assertEqual(client.column.List(1), 2)
             with self.assertRaises(mock.UnexpectedRequestException):
@@ -55,29 +57,68 @@
         with mock.Client(fusiontables.FusiontablesV1) as client_class:
             client_class.column.List.Expect(
                 request=1,
-                exception=apitools_base.HttpError({'status': 404}, '', ''))
+                exception=exceptions.HttpError({'status': 404}, '', ''),
+                enable_type_checking=False)
             client = fusiontables.FusiontablesV1(get_credentials=False)
-            with self.assertRaises(apitools_base.HttpError):
+            with self.assertRaises(exceptions.HttpError):
                 client.column.List(1)
 
+    def testMockFusionTypeChecking(self):
+        with mock.Client(fusiontables.FusiontablesV1) as client_class:
+            messages = client_class.MESSAGES_MODULE
+            client_class.column.List.Expect(
+                messages.FusiontablesColumnListRequest(tableId='foo'),
+                messages.ColumnList(items=[], totalItems=0))
+            client = fusiontables.FusiontablesV1(get_credentials=False)
+            self.assertEqual(
+                client.column.List(
+                    messages.FusiontablesColumnListRequest(tableId='foo')),
+                messages.ColumnList(items=[], totalItems=0))
+
+    def testMockFusionTypeCheckingErrors(self):
+        with mock.Client(fusiontables.FusiontablesV1) as client_class:
+            messages = client_class.MESSAGES_MODULE
+            # Wrong request type.
+            with self.assertRaises(exceptions.ConfigurationValueError):
+                client_class.column.List.Expect(
+                    messages.FusiontablesColumnInsertRequest(),
+                    messages.ColumnList(items=[], totalItems=0))
+            # Wrong response type.
+            with self.assertRaises(exceptions.ConfigurationValueError):
+                client_class.column.List.Expect(
+                    messages.FusiontablesColumnListRequest(tableId='foo'),
+                    messages.Column())
+            # No error if checking is disabled.
+            client_class.column.List.Expect(
+                messages.FusiontablesColumnInsertRequest(),
+                messages.Column(),
+                enable_type_checking=False)
+            client_class.column.List(
+                messages.FusiontablesColumnInsertRequest())
+
     def testMockIfAnotherException(self):
         with self.assertRaises(CustomException):
             with mock.Client(fusiontables.FusiontablesV1) as client_class:
-                client_class.column.List.Expect(request=1, response=2)
+                client_class.column.List.Expect(
+                    request=1, response=2, enable_type_checking=False)
                 raise CustomException('Something when wrong')
 
     def testMockFusionOrder(self):
         with mock.Client(fusiontables.FusiontablesV1) as client_class:
-            client_class.column.List.Expect(request=1, response=2)
-            client_class.column.List.Expect(request=2, response=1)
+            client_class.column.List.Expect(
+                request=1, response=2, enable_type_checking=False)
+            client_class.column.List.Expect(
+                request=2, response=1, enable_type_checking=False)
             client = fusiontables.FusiontablesV1(get_credentials=False)
             self.assertEqual(client.column.List(1), 2)
             self.assertEqual(client.column.List(2), 1)
 
     def testMockFusionWrongOrder(self):
         with mock.Client(fusiontables.FusiontablesV1) as client_class:
-            client_class.column.List.Expect(request=1, response=2)
-            client_class.column.List.Expect(request=2, response=1)
+            client_class.column.List.Expect(
+                request=1, response=2, enable_type_checking=False)
+            client_class.column.List.Expect(
+                request=2, response=1, enable_type_checking=False)
             client = fusiontables.FusiontablesV1(get_credentials=False)
             with self.assertRaises(mock.UnexpectedRequestException):
                 self.assertEqual(client.column.List(2), 1)
@@ -86,7 +127,8 @@
 
     def testMockFusionTooMany(self):
         with mock.Client(fusiontables.FusiontablesV1) as client_class:
-            client_class.column.List.Expect(request=1, response=2)
+            client_class.column.List.Expect(
+                request=1, response=2, enable_type_checking=False)
             client = fusiontables.FusiontablesV1(get_credentials=False)
             self.assertEqual(client.column.List(1), 2)
             with self.assertRaises(mock.UnexpectedRequestException):
@@ -95,8 +137,10 @@
     def testMockFusionTooFew(self):
         with self.assertRaises(mock.ExpectedRequestsException):
             with mock.Client(fusiontables.FusiontablesV1) as client_class:
-                client_class.column.List.Expect(request=1, response=2)
-                client_class.column.List.Expect(request=2, response=1)
+                client_class.column.List.Expect(
+                    request=1, response=2, enable_type_checking=False)
+                client_class.column.List.Expect(
+                    request=2, response=1, enable_type_checking=False)
                 client = fusiontables.FusiontablesV1(get_credentials=False)
                 self.assertEqual(client.column.List(1), 2)
 
diff --git a/apitools/base/py/transfer.py b/apitools/base/py/transfer.py
index 9fb63a8..e2541e3 100644
--- a/apitools/base/py/transfer.py
+++ b/apitools/base/py/transfer.py
@@ -30,6 +30,7 @@
 from six.moves import http_client
 
 from apitools.base.py import buffered_stream
+from apitools.base.py import compression
 from apitools.base.py import exceptions
 from apitools.base.py import http_wrapper
 from apitools.base.py import stream_slice
@@ -164,12 +165,12 @@
     def EnsureInitialized(self):
         if not self.initialized:
             raise exceptions.TransferInvalidError(
-                'Cannot use uninitialized %s', self._type_name)
+                'Cannot use uninitialized %s' % self._type_name)
 
     def EnsureUninitialized(self):
         if self.initialized:
             raise exceptions.TransferInvalidError(
-                'Cannot re-initialize %s', self._type_name)
+                'Cannot re-initialize %s' % self._type_name)
 
     def __del__(self):
         if self.__close_stream:
@@ -283,6 +284,7 @@
         http_request.headers['Range'] = 'bytes=0-%d' % (self.chunksize - 1,)
 
     def __SetTotal(self, info):
+        """Sets the total size based on info, if possible; otherwise 0."""
         if 'content-range' in info:
             _, _, total = info['content-range'].rpartition('/')
             if total != '*':
@@ -330,13 +332,16 @@
             self.StreamInChunks()
 
     def __NormalizeStartEnd(self, start, end=None):
+        """Normalizes start and end values based on total size."""
         if end is not None:
             if start < 0:
                 raise exceptions.TransferInvalidError(
-                    'Cannot have end index with negative start index')
+                    'Cannot have end index with negative start index ' +
+                    '[start=%d, end=%d]' % (start, end))
             elif start >= self.total_size:
                 raise exceptions.TransferInvalidError(
-                    'Cannot have start index greater than total size')
+                    'Cannot have start index greater than total size ' +
+                    '[start=%d, total_size=%d]' % (start, self.total_size))
             end = min(end, self.total_size - 1)
             if end < start:
                 raise exceptions.TransferInvalidError(
@@ -350,7 +355,7 @@
     def __SetRangeHeader(self, request, start, end=None):
         if start < 0:
             request.headers['range'] = 'bytes=%d' % start
-        elif end is None:
+        elif end is None or end < start:
             request.headers['range'] = 'bytes=%d-' % start
         else:
             request.headers['range'] = 'bytes=%d-%d' % (start, end)
@@ -421,7 +426,10 @@
                 raise exceptions.TransferRetryError(response.content)
         if response.status_code in (http_client.OK,
                                     http_client.PARTIAL_CONTENT):
-            self.stream.write(response.content)
+            try:
+                self.stream.write(six.ensure_binary(response.content))
+            except TypeError:
+                self.stream.write(six.ensure_text(response.content))
             self.__progress += response.length
             if response.info and 'content-encoding' in response.info:
                 # TODO(craigcitro): Handle the case where this changes over a
@@ -478,6 +486,13 @@
             response = self.__ProcessResponse(response)
             progress += response.length
             if response.length == 0:
+                if response.status_code == http_client.OK:
+                    # There can legitimately be no Content-Length header sent
+                    # in some cases (e.g., when there's a Transfer-Encoding
+                    # header) and if this was a 200 response (as opposed to
+                    # 206 Partial Content) we know we're done now without
+                    # looping further on received length.
+                    return
                 raise exceptions.TransferRetryError(
                     'Zero bytes unexpectedly returned in download response')
 
@@ -529,6 +544,26 @@
         self._ExecuteCallback(finish_callback, response)
 
 
+if six.PY3:
+    class MultipartBytesGenerator(email_generator.BytesGenerator):
+        """Generates a bytes Message object tree for multipart messages
+
+        This is a BytesGenerator that has been modified not to attempt
+        line-termination character modification in the bytes payload. It is
+        known to work with the compat32 policy only; it may work with other
+        policies, but this is untested. outfp must accept bytes in write().
+        """
+        def _handle_text(self, msg):
+            # If the string has surrogates the original source was bytes, so
+            # just write it back out.
+            if msg._payload is None:
+                return
+            self.write(msg._payload)
+
+        # Default body handler
+        _writeBody = _handle_text
+
+
 class Upload(_Transfer):
 
     """Data for a single Upload.
@@ -548,7 +583,7 @@
     def __init__(self, stream, mime_type, total_size=None, http=None,
                  close_stream=False, chunksize=None, auto_transfer=True,
                  progress_callback=None, finish_callback=None,
-                 **kwds):
+                 gzip_encoded=False, **kwds):
         super(Upload, self).__init__(
             stream, close_stream=close_stream, chunksize=chunksize,
             auto_transfer=auto_transfer, http=http, **kwds)
@@ -559,6 +594,7 @@
         self.__server_chunk_granularity = None
         self.__strategy = None
         self.__total_size = None
+        self.__gzip_encoded = gzip_encoded
 
         self.progress_callback = progress_callback
         self.finish_callback = finish_callback
@@ -569,7 +605,8 @@
         return self.__progress
 
     @classmethod
-    def FromFile(cls, filename, mime_type=None, auto_transfer=True, **kwds):
+    def FromFile(cls, filename, mime_type=None, auto_transfer=True,
+                 gzip_encoded=False, **kwds):
         """Create a new Upload object from a filename."""
         path = os.path.expanduser(filename)
         if not os.path.exists(path):
@@ -581,20 +618,23 @@
                     'Could not guess mime type for %s' % path)
         size = os.stat(path).st_size
         return cls(open(path, 'rb'), mime_type, total_size=size,
-                   close_stream=True, auto_transfer=auto_transfer, **kwds)
+                   close_stream=True, auto_transfer=auto_transfer,
+                   gzip_encoded=gzip_encoded, **kwds)
 
     @classmethod
     def FromStream(cls, stream, mime_type, total_size=None, auto_transfer=True,
-                   **kwds):
+                   gzip_encoded=False, **kwds):
         """Create a new Upload object from a stream."""
         if mime_type is None:
             raise exceptions.InvalidUserInputError(
                 'No mime_type specified for stream')
         return cls(stream, mime_type, total_size=total_size,
-                   close_stream=False, auto_transfer=auto_transfer, **kwds)
+                   close_stream=False, auto_transfer=auto_transfer,
+                   gzip_encoded=gzip_encoded, **kwds)
 
     @classmethod
-    def FromData(cls, stream, json_data, http, auto_transfer=None, **kwds):
+    def FromData(cls, stream, json_data, http, auto_transfer=None,
+                 gzip_encoded=False, **kwds):
         """Create a new Upload of stream from serialized json_data and http."""
         info = json.loads(json_data)
         missing_keys = cls._REQUIRED_SERIALIZATION_KEYS - set(info.keys())
@@ -606,7 +646,8 @@
             raise exceptions.InvalidUserInputError(
                 'Cannot override total_size on serialized Upload')
         upload = cls.FromStream(stream, info['mime_type'],
-                                total_size=info.get('total_size'), **kwds)
+                                total_size=info.get('total_size'),
+                                gzip_encoded=gzip_encoded, **kwds)
         if isinstance(stream, io.IOBase) and not stream.seekable():
             raise exceptions.InvalidUserInputError(
                 'Cannot restart resumable upload on non-seekable stream')
@@ -723,6 +764,23 @@
             else:
                 url_builder.query_params['uploadType'] = 'media'
                 self.__ConfigureMediaRequest(http_request)
+            # Once the entire body is written, compress the body if configured
+            # to. Both multipart and media request uploads will read the
+            # entire stream into memory, which means full compression is also
+            # safe to perform. Because the strategy is set to SIMPLE_UPLOAD,
+            # StreamInChunks throws an exception, meaning double compression
+            # cannot happen.
+            if self.__gzip_encoded:
+                http_request.headers['Content-Encoding'] = 'gzip'
+                # Turn the body into a stream so that we can compress it, then
+                # read the compressed bytes.  In the event of a retry (e.g. if
+                # our access token has expired), we need to be able to re-read
+                # the body, which we can't do with a stream. So, we consume the
+                # bytes from the stream now and store them in a re-readable
+                # bytes container.
+                http_request.body = (
+                    compression.CompressStream(
+                        six.BytesIO(http_request.body))[0].read())
         else:
             url_builder.relative_path = upload_config.resumable_path
             url_builder.query_params['uploadType'] = 'resumable'
@@ -758,7 +816,7 @@
         #       `> ` to `From ` lines.
         fp = six.BytesIO()
         if six.PY3:
-            generator_class = email_generator.BytesGenerator
+            generator_class = MultipartBytesGenerator
         else:
             generator_class = email_generator.Generator
         g = generator_class(fp, mangle_from_=False)
@@ -861,9 +919,14 @@
         chunksize = chunksize or self.chunksize
         if chunksize % self.__server_chunk_granularity:
             raise exceptions.ConfigurationValueError(
-                'Server requires chunksize to be a multiple of %d',
+                'Server requires chunksize to be a multiple of %d' %
                 self.__server_chunk_granularity)
 
+    def __IsRetryable(self, response):
+        return (response.status_code >= 500 or
+                response.status_code == http_wrapper.TOO_MANY_REQUESTS or
+                response.retry_after)
+
     def __StreamMedia(self, callback=None, finish_callback=None,
                       additional_headers=None, use_chunks=True):
         """Helper function for StreamMedia / StreamInChunks."""
@@ -874,17 +937,44 @@
         finish_callback = finish_callback or self.finish_callback
         # final_response is set if we resumed an already-completed upload.
         response = self.__final_response
-        send_func = self.__SendChunk if use_chunks else self.__SendMediaBody
+
+        def CallSendChunk(start):
+            return self.__SendChunk(
+                start, additional_headers=additional_headers)
+
+        def CallSendMediaBody(start):
+            return self.__SendMediaBody(
+                start, additional_headers=additional_headers)
+
+        send_func = CallSendChunk if use_chunks else CallSendMediaBody
+        if not use_chunks and self.__gzip_encoded:
+            raise exceptions.InvalidUserInputError(
+                'Cannot gzip encode non-chunked upload')
         if use_chunks:
             self.__ValidateChunksize(self.chunksize)
         self.EnsureInitialized()
         while not self.complete:
-            response = send_func(self.stream.tell(),
-                                 additional_headers=additional_headers)
+            response = send_func(self.stream.tell())
             if response.status_code in (http_client.OK, http_client.CREATED):
                 self.__complete = True
                 break
-            self.__progress = self.__GetLastByte(response.info['range'])
+            if response.status_code not in (
+                    http_client.OK, http_client.CREATED,
+                    http_wrapper.RESUME_INCOMPLETE):
+                # Only raise an exception if the error is something we can't
+                # recover from.
+                if (self.strategy != RESUMABLE_UPLOAD or
+                        not self.__IsRetryable(response)):
+                    raise exceptions.HttpError.FromResponse(response)
+                # We want to reset our state to wherever the server left us
+                # before this failed request, and then raise.
+                self.RefreshResumableUploadState()
+
+                self._ExecuteCallback(callback, response)
+                continue
+
+            self.__progress = self.__GetLastByte(
+                self._GetRangeHeaderFromResponse(response))
             if self.progress + 1 != self.stream.tell():
                 # TODO(craigcitro): Add a better way to recover here.
                 raise exceptions.CommunicationError(
@@ -931,20 +1021,21 @@
 
     def __SendMediaRequest(self, request, end):
         """Request helper function for SendMediaBody & SendChunk."""
+        def CheckResponse(response):
+            if response is None:
+                # Caller shouldn't call us if the response is None,
+                # but handle anyway.
+                raise exceptions.RequestError(
+                    'Request to url %s did not return a response.' %
+                    response.request_url)
         response = http_wrapper.MakeRequest(
             self.bytes_http, request, retry_func=self.retry_func,
-            retries=self.num_retries)
-        if response.status_code not in (http_client.OK, http_client.CREATED,
-                                        http_wrapper.RESUME_INCOMPLETE):
-            # We want to reset our state to wherever the server left us
-            # before this failed request, and then raise.
-            self.RefreshResumableUploadState()
-            raise exceptions.HttpError.FromResponse(response)
+            retries=self.num_retries, check_response_func=CheckResponse)
         if response.status_code == http_wrapper.RESUME_INCOMPLETE:
             last_byte = self.__GetLastByte(
                 self._GetRangeHeaderFromResponse(response))
             if last_byte + 1 != end:
-                self.stream.seek(last_byte)
+                self.stream.seek(last_byte + 1)
         return response
 
     def __SendMediaBody(self, start, additional_headers=None):
@@ -976,7 +1067,17 @@
         """Send the specified chunk."""
         self.EnsureInitialized()
         no_log_body = self.total_size is None
-        if self.total_size is None:
+        request = http_wrapper.Request(url=self.url, http_method='PUT')
+        if self.__gzip_encoded:
+            request.headers['Content-Encoding'] = 'gzip'
+            body_stream, read_length, exhausted = compression.CompressStream(
+                self.stream, self.chunksize)
+            end = start + read_length
+            # If the stream length was previously unknown and the input stream
+            # is exhausted, then we're at the end of the stream.
+            if self.total_size is None and exhausted:
+                self.__total_size = end
+        elif self.total_size is None:
             # For the streaming resumable case, we need to detect when
             # we're at the end of the stream.
             body_stream = buffered_stream.BufferedStream(
@@ -995,8 +1096,7 @@
             body_stream = stream_slice.StreamSlice(self.stream, end - start)
         # TODO(craigcitro): Think about clearer errors on "no data in
         # stream".
-        request = http_wrapper.Request(url=self.url, http_method='PUT',
-                                       body=body_stream)
+        request.body = body_stream
         request.headers['Content-Type'] = self.mime_type
         if no_log_body:
             # Disable logging of streaming body.
diff --git a/apitools/base/py/transfer_test.py b/apitools/base/py/transfer_test.py
index a4c43e7..c68e77e 100644
--- a/apitools/base/py/transfer_test.py
+++ b/apitools/base/py/transfer_test.py
@@ -17,12 +17,15 @@
 """Tests for transfer.py."""
 import string
 
+import httplib2
 import mock
 import six
 from six.moves import http_client
 import unittest2
 
 from apitools.base.py import base_api
+from apitools.base.py import exceptions
+from apitools.base.py import gzip
 from apitools.base.py import http_wrapper
 from apitools.base.py import transfer
 
@@ -92,6 +95,46 @@
                              download._Download__ComputeEndByte(start),
                              msg='Failed on start={0}'.format(start))
 
+    def testDownloadThenStream(self):
+        bytes_http = object()
+        http = object()
+        download_stream = six.StringIO()
+        download = transfer.Download.FromStream(download_stream,
+                                                total_size=26)
+        download.bytes_http = bytes_http
+        base_url = 'https://part.one/'
+        with mock.patch.object(http_wrapper, 'MakeRequest',
+                               autospec=True) as make_request:
+            make_request.return_value = http_wrapper.Response(
+                info={
+                    'content-range': 'bytes 0-25/26',
+                    'status': http_client.OK,
+                },
+                content=string.ascii_lowercase,
+                request_url=base_url,
+            )
+            request = http_wrapper.Request(url='https://part.one/')
+            download.InitializeDownload(request, http=http)
+            self.assertEqual(1, make_request.call_count)
+            received_request = make_request.call_args[0][1]
+            self.assertEqual(base_url, received_request.url)
+            self.assertRangeAndContentRangeCompatible(
+                received_request, make_request.return_value)
+
+        with mock.patch.object(http_wrapper, 'MakeRequest',
+                               autospec=True) as make_request:
+            make_request.return_value = http_wrapper.Response(
+                info={
+                    'status': http_client.REQUESTED_RANGE_NOT_SATISFIABLE,
+                },
+                content='error',
+                request_url=base_url,
+            )
+            download.StreamInChunks()
+            self.assertEqual(1, make_request.call_count)
+            received_request = make_request.call_args[0][1]
+            self.assertEqual('bytes=26-', received_request.headers['range'])
+
     def testGetRange(self):
         for (start_byte, end_byte) in [(0, 25), (5, 15), (0, 0), (25, 25)]:
             bytes_http = object()
@@ -110,7 +153,7 @@
                                          (start_byte, end_byte),
                         'status': http_client.OK,
                     },
-                    content=string.ascii_lowercase[start_byte:end_byte+1],
+                    content=string.ascii_lowercase[start_byte:end_byte + 1],
                     request_url=base_url,
                 )
                 request = http_wrapper.Request(url='https://part.one/')
@@ -266,3 +309,268 @@
             self.assertEqual(url_builder.query_params['uploadType'], 'media')
             rewritten_upload_contents = http_request.body
             self.assertTrue(rewritten_upload_contents.endswith(upload_bytes))
+
+
+class UploadTest(unittest2.TestCase):
+
+    def setUp(self):
+        # Sample highly compressible data.
+        self.sample_data = b'abc' * 200
+        # Stream of the sample data.
+        self.sample_stream = six.BytesIO(self.sample_data)
+        # Sample url_builder.
+        self.url_builder = base_api._UrlBuilder('http://www.uploads.com')
+        # Sample request.
+        self.request = http_wrapper.Request(
+            'http://www.uploads.com',
+            headers={'content-type': 'text/plain'})
+        # Sample successful response.
+        self.response = http_wrapper.Response(
+            info={'status': http_client.OK,
+                  'location': 'http://www.uploads.com'},
+            content='',
+            request_url='http://www.uploads.com',)
+        # Sample failure response.
+        self.fail_response = http_wrapper.Response(
+            info={'status': http_client.SERVICE_UNAVAILABLE,
+                  'location': 'http://www.uploads.com'},
+            content='',
+            request_url='http://www.uploads.com',)
+
+    def testStreamInChunksCompressed(self):
+        """Test that StreamInChunks will handle compression correctly."""
+        # Create and configure the upload object.
+        upload = transfer.Upload(
+            stream=self.sample_stream,
+            mime_type='text/plain',
+            total_size=len(self.sample_data),
+            close_stream=False,
+            gzip_encoded=True)
+        upload.strategy = transfer.RESUMABLE_UPLOAD
+        # Set the chunk size so the entire stream is uploaded.
+        upload.chunksize = len(self.sample_data)
+        # Mock the upload to return the sample response.
+        with mock.patch.object(transfer.Upload,
+                               '_Upload__SendMediaRequest') as mock_result, \
+                mock.patch.object(http_wrapper,
+                                  'MakeRequest') as make_request:
+            mock_result.return_value = self.response
+            make_request.return_value = self.response
+
+            # Initialization.
+            upload.InitializeUpload(self.request, 'http')
+            upload.StreamInChunks()
+            # Get the uploaded request and end position of the stream.
+            (request, _), _ = mock_result.call_args_list[0]
+            # Ensure the mock was called.
+            self.assertTrue(mock_result.called)
+            # Ensure the correct content encoding was set.
+            self.assertEqual(request.headers['Content-Encoding'], 'gzip')
+            # Ensure the stream was compressed.
+            self.assertLess(len(request.body), len(self.sample_data))
+
+    def testStreamMediaCompressedFail(self):
+        """Test that non-chunked uploads raise an exception.
+
+        Ensure uploads with the compressed and resumable flags set called from
+        StreamMedia raise an exception. Those uploads are unsupported.
+        """
+        # Create the upload object.
+        upload = transfer.Upload(
+            stream=self.sample_stream,
+            mime_type='text/plain',
+            total_size=len(self.sample_data),
+            close_stream=False,
+            auto_transfer=True,
+            gzip_encoded=True)
+        upload.strategy = transfer.RESUMABLE_UPLOAD
+        # Mock the upload to return the sample response.
+        with mock.patch.object(http_wrapper,
+                               'MakeRequest') as make_request:
+            make_request.return_value = self.response
+
+            # Initialization.
+            upload.InitializeUpload(self.request, 'http')
+            # Ensure stream media raises an exception when the upload is
+            # compressed. Compression is not supported on non-chunked uploads.
+            with self.assertRaises(exceptions.InvalidUserInputError):
+                upload.StreamMedia()
+
+    def testAutoTransferCompressed(self):
+        """Test that automatic transfers are compressed.
+
+        Ensure uploads with the compressed, resumable, and automatic transfer
+        flags set call StreamInChunks. StreamInChunks is tested in an earlier
+        test.
+        """
+        # Create the upload object.
+        upload = transfer.Upload(
+            stream=self.sample_stream,
+            mime_type='text/plain',
+            total_size=len(self.sample_data),
+            close_stream=False,
+            gzip_encoded=True)
+        upload.strategy = transfer.RESUMABLE_UPLOAD
+        # Mock the upload to return the sample response.
+        with mock.patch.object(transfer.Upload,
+                               'StreamInChunks') as mock_result, \
+                mock.patch.object(http_wrapper,
+                                  'MakeRequest') as make_request:
+            mock_result.return_value = self.response
+            make_request.return_value = self.response
+
+            # Initialization.
+            upload.InitializeUpload(self.request, 'http')
+            # Ensure the mock was called.
+            self.assertTrue(mock_result.called)
+
+    def testMultipartCompressed(self):
+        """Test that multipart uploads are compressed."""
+        # Create the multipart configuration.
+        upload_config = base_api.ApiUploadInfo(
+            accept=['*/*'],
+            max_size=None,
+            simple_multipart=True,
+            simple_path=u'/upload',)
+        # Create the upload object.
+        upload = transfer.Upload(
+            stream=self.sample_stream,
+            mime_type='text/plain',
+            total_size=len(self.sample_data),
+            close_stream=False,
+            gzip_encoded=True)
+        # Set a body to trigger multipart configuration.
+        self.request.body = '{"body_field_one": 7}'
+        # Configure the request.
+        upload.ConfigureRequest(upload_config, self.request, self.url_builder)
+        # Ensure the request is a multipart request now.
+        self.assertEqual(
+            self.url_builder.query_params['uploadType'], 'multipart')
+        # Ensure the request is gzip encoded.
+        self.assertEqual(self.request.headers['Content-Encoding'], 'gzip')
+        # Ensure data is compressed.
+        self.assertLess(len(self.request.body), len(self.sample_data))
+        # Ensure uncompressed data includes the sample data.
+        with gzip.GzipFile(fileobj=six.BytesIO(self.request.body)) as f:
+            original = f.read()
+            self.assertTrue(self.sample_data in original)
+
+    def testMediaCompressed(self):
+        """Test that media uploads are compressed."""
+        # Create the media configuration.
+        upload_config = base_api.ApiUploadInfo(
+            accept=['*/*'],
+            max_size=None,
+            simple_multipart=True,
+            simple_path=u'/upload',)
+        # Create the upload object.
+        upload = transfer.Upload(
+            stream=self.sample_stream,
+            mime_type='text/plain',
+            total_size=len(self.sample_data),
+            close_stream=False,
+            gzip_encoded=True)
+        # Configure the request.
+        upload.ConfigureRequest(upload_config, self.request, self.url_builder)
+        # Ensure the request is a media request now.
+        self.assertEqual(self.url_builder.query_params['uploadType'], 'media')
+        # Ensure the request is gzip encoded.
+        self.assertEqual(self.request.headers['Content-Encoding'], 'gzip')
+        # Ensure data is compressed.
+        self.assertLess(len(self.request.body), len(self.sample_data))
+        # Ensure uncompressed data includes the sample data.
+        with gzip.GzipFile(fileobj=six.BytesIO(self.request.body)) as f:
+            original = f.read()
+            self.assertTrue(self.sample_data in original)
+
+    def HttpRequestSideEffect(self, responses=None):
+        responses = [(response.info, response.content)
+                     for response in responses]
+
+        def _side_effect(uri, **kwargs):  # pylint: disable=unused-argument
+            body = kwargs['body']
+            read_func = getattr(body, 'read', None)
+            if read_func:
+                # If the body is a stream, consume the stream.
+                body = read_func()
+            self.assertEqual(int(kwargs['headers']['content-length']),
+                             len(body))
+            return responses.pop(0)
+        return _side_effect
+
+    def testRetryRequestChunks(self):
+        """Test that StreamInChunks will retry correctly."""
+        refresh_response = http_wrapper.Response(
+            info={'status': http_wrapper.RESUME_INCOMPLETE,
+                  'location': 'http://www.uploads.com'},
+            content='',
+            request_url='http://www.uploads.com',)
+
+        # Create and configure the upload object.
+        bytes_http = httplib2.Http()
+        upload = transfer.Upload(
+            stream=self.sample_stream,
+            mime_type='text/plain',
+            total_size=len(self.sample_data),
+            close_stream=False,
+            http=bytes_http)
+
+        upload.strategy = transfer.RESUMABLE_UPLOAD
+        # Set the chunk size so the entire stream is uploaded.
+        upload.chunksize = len(self.sample_data)
+        # Mock the upload to return the sample response.
+        with mock.patch.object(bytes_http,
+                               'request') as make_request:
+            # This side effect also checks the request body.
+            responses = [
+                self.response,  # Initial request in InitializeUpload().
+                self.fail_response,  # 503 status code from server.
+                refresh_response,  # Refresh upload progress.
+                self.response,  # Successful request.
+            ]
+            make_request.side_effect = self.HttpRequestSideEffect(responses)
+
+            # Initialization.
+            upload.InitializeUpload(self.request, bytes_http)
+            upload.StreamInChunks()
+
+            # Ensure the mock was called the correct number of times.
+            self.assertEquals(make_request.call_count, len(responses))
+
+    def testStreamInChunks(self):
+        """Test StreamInChunks."""
+        resume_incomplete_responses = [http_wrapper.Response(
+            info={'status': http_wrapper.RESUME_INCOMPLETE,
+                  'location': 'http://www.uploads.com',
+                  'range': '0-{}'.format(end)},
+            content='',
+            request_url='http://www.uploads.com',) for end in [199, 399, 599]]
+        responses = [
+            self.response  # Initial request in InitializeUpload().
+        ] + resume_incomplete_responses + [
+            self.response,  # Successful request.
+        ]
+        # Create and configure the upload object.
+        bytes_http = httplib2.Http()
+        upload = transfer.Upload(
+            stream=self.sample_stream,
+            mime_type='text/plain',
+            total_size=len(self.sample_data),
+            close_stream=False,
+            http=bytes_http)
+
+        upload.strategy = transfer.RESUMABLE_UPLOAD
+        # Set the chunk size so the entire stream is uploaded.
+        upload.chunksize = 200
+        # Mock the upload to return the sample response.
+        with mock.patch.object(bytes_http,
+                               'request') as make_request:
+            # This side effect also checks the request body.
+            make_request.side_effect = self.HttpRequestSideEffect(responses)
+
+            # Initialization.
+            upload.InitializeUpload(self.request, bytes_http)
+            upload.StreamInChunks()
+
+            # Ensure the mock was called the correct number of times.
+            self.assertEquals(make_request.call_count, len(responses))
diff --git a/apitools/base/py/util.py b/apitools/base/py/util.py
index 112259e..ac1a44c 100644
--- a/apitools/base/py/util.py
+++ b/apitools/base/py/util.py
@@ -27,7 +27,7 @@
 import six.moves.urllib.request as urllib_request
 
 from apitools.base.protorpclite import messages
-from apitools.base.py import encoding
+from apitools.base.py import encoding_helper as encoding
 from apitools.base.py import exceptions
 
 __all__ = [
@@ -76,8 +76,10 @@
 def NormalizeScopes(scope_spec):
     """Normalize scope_spec to a set of strings."""
     if isinstance(scope_spec, six.string_types):
+        scope_spec = six.ensure_str(scope_spec)
         return set(scope_spec.split(' '))
     elif isinstance(scope_spec, collections.Iterable):
+        scope_spec = [six.ensure_str(x) for x in scope_spec]
         return set(scope_spec)
     raise exceptions.TypecheckError(
         'NormalizeScopes expected string or iterable, found %s' % (
@@ -224,6 +226,7 @@
             request_type, python_name=param_name)
         if field_remapping is not None:
             new_params[field_remapping] = new_params.pop(param_name)
+            param_name = field_remapping
         if isinstance(value, messages.Enum):
             new_params[param_name] = encoding.GetCustomJsonEnumMapping(
                 type(value), python_name=str(value)) or str(value)
diff --git a/apitools/base/py/util_test.py b/apitools/base/py/util_test.py
index a06d1a9..b2ece27 100644
--- a/apitools/base/py/util_test.py
+++ b/apitools/base/py/util_test.py
@@ -37,10 +37,13 @@
 
     str_field = messages.StringField(1)
     enum_field = messages.EnumField('AnEnum', 2)
+    enum_field_remapping = messages.EnumField('AnEnum', 3)
 
 
 encoding.AddCustomJsonFieldMapping(
     MessageWithRemappings, 'str_field', 'path_field')
+encoding.AddCustomJsonFieldMapping(
+    MessageWithRemappings, 'enum_field_remapping', 'enum_field_remapped')
 encoding.AddCustomJsonEnumMapping(
     MessageWithRemappings.AnEnum, 'value_one', 'ONE')
 
@@ -178,10 +181,12 @@
         params = {
             'str_field': 'foo',
             'enum_field': MessageWithRemappings.AnEnum.value_one,
+            'enum_field_remapping': MessageWithRemappings.AnEnum.value_one,
         }
         remapped_params = {
             'path_field': 'foo',
             'enum_field': 'ONE',
+            'enum_field_remapped': 'ONE',
         }
         self.assertEqual(remapped_params,
                          util.MapRequestParams(params, MessageWithRemappings))
diff --git a/apitools/data/apitools_client_secrets.json b/apitools/data/apitools_client_secrets.json
index 5761d14..7afd240 100644
--- a/apitools/data/apitools_client_secrets.json
+++ b/apitools/data/apitools_client_secrets.json
@@ -10,6 +10,6 @@
       "urn:ietf:wg:oauth:2.0:oob",
       "oob"
     ],
-    "token_uri": "https://accounts.google.com/o/oauth2/token"
+    "token_uri": "https://oauth2.googleapis.com/token"
   }
 }
diff --git a/apitools/gen/client_generation_test.py b/apitools/gen/client_generation_test.py
index 5e7932a..9146501 100644
--- a/apitools/gen/client_generation_test.py
+++ b/apitools/gen/client_generation_test.py
@@ -15,33 +15,37 @@
 
 """Test gen_client against all the APIs we use regularly."""
 
+import importlib
 import logging
 import os
+import six
 import subprocess
+import sys
 import tempfile
 
-import unittest2
-
 from apitools.gen import gen_client
 from apitools.gen import test_utils
 
+if six.PY2:
+    import unittest2 as unittest
+else:
+    import unittest
 
 _API_LIST = [
-    'drive.v2',
     'bigquery.v2',
     'compute.v1',
+    'drive.v3',
     'storage.v1',
 ]
 
 
-class ClientGenerationTest(unittest2.TestCase):
+class ClientGenerationTest(unittest.TestCase):
 
     def setUp(self):
         super(ClientGenerationTest, self).setUp()
         self.gen_client_binary = 'gen_client'
 
     @test_utils.SkipOnWindows
-    @test_utils.RunOnlyOnPython27
     def testGeneration(self):
         for api in _API_LIST:
             with test_utils.TempDir(change_to=True):
@@ -62,20 +66,7 @@
                     continue
                 self.assertEqual(0, retcode)
 
-                with tempfile.NamedTemporaryFile() as out:
-                    with tempfile.NamedTemporaryFile() as err:
-                        cmdline_args = [
-                            os.path.join(
-                                'generated', api.replace('.', '_') + '.py'),
-                            'help',
-                        ]
-                        retcode = subprocess.call(
-                            cmdline_args, stdout=out, stderr=err)
-                        with open(err.name, 'rb') as f:
-                            err_output = f.read()
-                # appcommands returns 1 on help
-                self.assertEqual(1, retcode)
-                if 'Traceback (most recent call last):' in err_output:
-                    err = '\n======\n%s======\n' % err_output
-                    self.fail(
-                        'Error raised in generated client:' + err)
+                sys.path.insert(0, os.path.join(os.getcwd(), 'generated'))
+                # Ensure we can import the generated client.
+                importlib.import_module('{}_{}_client'.format(
+                    *api.split('.')))
diff --git a/apitools/gen/command_registry.py b/apitools/gen/command_registry.py
deleted file mode 100644
index 486934f..0000000
--- a/apitools/gen/command_registry.py
+++ /dev/null
@@ -1,608 +0,0 @@
-#!/usr/bin/env python
-#
-# Copyright 2015 Google Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""Command registry for apitools."""
-
-import logging
-import textwrap
-
-from apitools.base.protorpclite import descriptor
-from apitools.base.protorpclite import messages
-from apitools.gen import extended_descriptor
-
-# This is a code generator; we're purposely verbose.
-# pylint:disable=too-many-statements
-
-_VARIANT_TO_FLAG_TYPE_MAP = {
-    messages.Variant.DOUBLE: 'float',
-    messages.Variant.FLOAT: 'float',
-    messages.Variant.INT64: 'string',
-    messages.Variant.UINT64: 'string',
-    messages.Variant.INT32: 'integer',
-    messages.Variant.BOOL: 'boolean',
-    messages.Variant.STRING: 'string',
-    messages.Variant.MESSAGE: 'string',
-    messages.Variant.BYTES: 'string',
-    messages.Variant.UINT32: 'integer',
-    messages.Variant.ENUM: 'enum',
-    messages.Variant.SINT32: 'integer',
-    messages.Variant.SINT64: 'integer',
-}
-
-
-class FlagInfo(messages.Message):
-
-    """Information about a flag and conversion to a message.
-
-    Fields:
-      name: name of this flag.
-      type: type of the flag.
-      description: description of the flag.
-      default: default value for this flag.
-      enum_values: if this flag is an enum, the list of possible
-          values.
-      required: whether or not this flag is required.
-      fv: name of the flag_values object where this flag should
-          be registered.
-      conversion: template for type conversion.
-      special: (boolean, default: False) If True, this flag doesn't
-          correspond to an attribute on the request.
-    """
-    name = messages.StringField(1)
-    type = messages.StringField(2)
-    description = messages.StringField(3)
-    default = messages.StringField(4)
-    enum_values = messages.StringField(5, repeated=True)
-    required = messages.BooleanField(6, default=False)
-    fv = messages.StringField(7)
-    conversion = messages.StringField(8)
-    special = messages.BooleanField(9, default=False)
-
-
-class ArgInfo(messages.Message):
-
-    """Information about a single positional command argument.
-
-    Fields:
-      name: argument name.
-      description: description of this argument.
-      conversion: template for type conversion.
-    """
-    name = messages.StringField(1)
-    description = messages.StringField(2)
-    conversion = messages.StringField(3)
-
-
-class CommandInfo(messages.Message):
-
-    """Information about a single command.
-
-    Fields:
-      name: name of this command.
-      class_name: name of the apitools_base.NewCmd class for this command.
-      description: description of this command.
-      flags: list of FlagInfo messages for the command-specific flags.
-      args: list of ArgInfo messages for the positional args.
-      request_type: name of the request type for this command.
-      client_method_path: path from the client object to the method
-          this command is wrapping.
-    """
-    name = messages.StringField(1)
-    class_name = messages.StringField(2)
-    description = messages.StringField(3)
-    flags = messages.MessageField(FlagInfo, 4, repeated=True)
-    args = messages.MessageField(ArgInfo, 5, repeated=True)
-    request_type = messages.StringField(6)
-    client_method_path = messages.StringField(7)
-    has_upload = messages.BooleanField(8, default=False)
-    has_download = messages.BooleanField(9, default=False)
-
-
-class CommandRegistry(object):
-
-    """Registry for CLI commands."""
-
-    def __init__(self, package, version, client_info, message_registry,
-                 root_package, base_files_package, protorpc_package, names):
-        self.__package = package
-        self.__version = version
-        self.__client_info = client_info
-        self.__names = names
-        self.__message_registry = message_registry
-        self.__root_package = root_package
-        self.__base_files_package = base_files_package
-        self.__protorpc_package = protorpc_package
-        self.__command_list = []
-        self.__global_flags = []
-
-    def Validate(self):
-        self.__message_registry.Validate()
-
-    def AddGlobalParameters(self, schema):
-        for field in schema.fields:
-            self.__global_flags.append(self.__FlagInfoFromField(field, schema))
-
-    def AddCommandForMethod(self, service_name, method_name, method_info,
-                            request, _):
-        """Add the given method as a command."""
-        command_name = self.__GetCommandName(method_info.method_id)
-        calling_path = '%s.%s' % (service_name, method_name)
-        request_type = self.__message_registry.LookupDescriptor(request)
-        description = method_info.description
-        if not description:
-            description = 'Call the %s method.' % method_info.method_id
-        field_map = dict((f.name, f) for f in request_type.fields)
-        args = []
-        arg_names = []
-        for field_name in method_info.ordered_params:
-            extended_field = field_map[field_name]
-            name = extended_field.name
-            args.append(ArgInfo(
-                name=name,
-                description=extended_field.description,
-                conversion=self.__GetConversion(extended_field, request_type),
-            ))
-            arg_names.append(name)
-        flags = []
-        for extended_field in sorted(request_type.fields,
-                                     key=lambda x: x.name):
-            field = extended_field.field_descriptor
-            if extended_field.name in arg_names:
-                continue
-            if self.__FieldIsRequired(field):
-                logging.warning(
-                    'Required field %s not in ordered_params for command %s',
-                    extended_field.name, command_name)
-            flags.append(self.__FlagInfoFromField(
-                extended_field, request_type, fv='fv'))
-        if method_info.upload_config:
-            # TODO(craigcitro): Consider adding additional flags to allow
-            # determining the filename from the object metadata.
-            upload_flag_info = FlagInfo(
-                name='upload_filename', type='string', default='',
-                description='Filename to use for upload.', fv='fv',
-                special=True)
-            flags.append(upload_flag_info)
-            mime_description = (
-                'MIME type to use for the upload. Only needed if '
-                'the extension on --upload_filename does not determine '
-                'the correct (or any) MIME type.')
-            mime_type_flag_info = FlagInfo(
-                name='upload_mime_type', type='string', default='',
-                description=mime_description, fv='fv', special=True)
-            flags.append(mime_type_flag_info)
-        if method_info.supports_download:
-            download_flag_info = FlagInfo(
-                name='download_filename', type='string', default='',
-                description='Filename to use for download.', fv='fv',
-                special=True)
-            flags.append(download_flag_info)
-            overwrite_description = (
-                'If True, overwrite the existing file when downloading.')
-            overwrite_flag_info = FlagInfo(
-                name='overwrite', type='boolean', default='False',
-                description=overwrite_description, fv='fv', special=True)
-            flags.append(overwrite_flag_info)
-        command_info = CommandInfo(
-            name=command_name,
-            class_name=self.__names.ClassName(command_name),
-            description=description,
-            flags=flags,
-            args=args,
-            request_type=request_type.full_name,
-            client_method_path=calling_path,
-            has_upload=bool(method_info.upload_config),
-            has_download=bool(method_info.supports_download)
-        )
-        self.__command_list.append(command_info)
-
-    def __LookupMessage(self, message, field):
-        message_type = self.__message_registry.LookupDescriptor(
-            '%s.%s' % (message.name, field.type_name))
-        if message_type is None:
-            message_type = self.__message_registry.LookupDescriptor(
-                field.type_name)
-        return message_type
-
-    def __GetCommandName(self, method_id):
-        command_name = method_id
-        prefix = '%s.' % self.__package
-        if command_name.startswith(prefix):
-            command_name = command_name[len(prefix):]
-        command_name = command_name.replace('.', '_')
-        return command_name
-
-    def __GetConversion(self, extended_field, extended_message):
-        """Returns a template for field type."""
-        field = extended_field.field_descriptor
-
-        type_name = ''
-        if field.variant in (messages.Variant.MESSAGE, messages.Variant.ENUM):
-            if field.type_name.startswith('apitools.base.protorpclite.'):
-                type_name = field.type_name
-            else:
-                field_message = self.__LookupMessage(extended_message, field)
-                if field_message is None:
-                    raise ValueError(
-                        'Could not find type for field %s' % field.name)
-                type_name = 'messages.%s' % field_message.full_name
-
-        template = ''
-        if field.variant in (messages.Variant.INT64, messages.Variant.UINT64):
-            template = 'int(%s)'
-        elif field.variant == messages.Variant.MESSAGE:
-            template = 'apitools_base.JsonToMessage(%s, %%s)' % type_name
-        elif field.variant == messages.Variant.ENUM:
-            template = '%s(%%s)' % type_name
-        elif field.variant == messages.Variant.STRING:
-            template = "%s.decode('utf8')"
-
-        if self.__FieldIsRepeated(extended_field.field_descriptor):
-            if template:
-                template = '[%s for x in %%s]' % (template % 'x')
-
-        return template
-
-    def __FieldIsRequired(self, field):
-        return field.label == descriptor.FieldDescriptor.Label.REQUIRED
-
-    def __FieldIsRepeated(self, field):
-        return field.label == descriptor.FieldDescriptor.Label.REPEATED
-
-    def __FlagInfoFromField(self, extended_field, extended_message, fv=''):
-        """Creates FlagInfo object for given field."""
-        field = extended_field.field_descriptor
-        flag_info = FlagInfo()
-        flag_info.name = str(field.name)
-        # TODO(craigcitro): We should key by variant.
-        flag_info.type = _VARIANT_TO_FLAG_TYPE_MAP[field.variant]
-        flag_info.description = extended_field.description
-        if field.default_value:
-            # TODO(craigcitro): Formatting?
-            flag_info.default = field.default_value
-        if flag_info.type == 'enum':
-            # TODO(craigcitro): Does protorpc do this for us?
-            enum_type = self.__LookupMessage(extended_message, field)
-            if enum_type is None:
-                raise ValueError('Cannot find enum type %s', field.type_name)
-            flag_info.enum_values = [x.name for x in enum_type.values]
-            # Note that this choice is completely arbitrary -- but we only
-            # push the value through if the user specifies it, so this
-            # doesn't hurt anything.
-            if flag_info.default is None:
-                flag_info.default = flag_info.enum_values[0]
-        if self.__FieldIsRequired(field):
-            flag_info.required = True
-        flag_info.fv = fv
-        flag_info.conversion = self.__GetConversion(
-            extended_field, extended_message)
-        return flag_info
-
-    def __PrintFlagDeclarations(self, printer):
-        """Writes out command line flag declarations."""
-        package = self.__client_info.package
-        function_name = '_Declare%sFlags' % (package[0].upper() + package[1:])
-        printer()
-        printer()
-        printer('def %s():', function_name)
-        with printer.Indent():
-            printer('"""Declare global flags in an idempotent way."""')
-            printer("if 'api_endpoint' in flags.FLAGS:")
-            with printer.Indent():
-                printer('return')
-            printer('flags.DEFINE_string(')
-            with printer.Indent('    '):
-                printer("'api_endpoint',")
-                printer('%r,', self.__client_info.base_url)
-                printer("'URL of the API endpoint to use.',")
-                printer("short_name='%s_url')", self.__package)
-            printer('flags.DEFINE_string(')
-            with printer.Indent('    '):
-                printer("'history_file',")
-                printer('%r,', '~/.%s.%s.history' %
-                        (self.__package, self.__version))
-                printer("'File with interactive shell history.')")
-            printer('flags.DEFINE_multistring(')
-            with printer.Indent('    '):
-                printer("'add_header', [],")
-                printer("'Additional http headers (as key=value strings). '")
-                printer("'Can be specified multiple times.')")
-            printer('flags.DEFINE_string(')
-            with printer.Indent('    '):
-                printer("'service_account_json_keyfile', '',")
-                printer("'Filename for a JSON service account key downloaded'")
-                printer("' from the Developer Console.')")
-            for flag_info in self.__global_flags:
-                self.__PrintFlag(printer, flag_info)
-        printer()
-        printer()
-        printer('FLAGS = flags.FLAGS')
-        printer('apitools_base_cli.DeclareBaseFlags()')
-        printer('%s()', function_name)
-
-    def __PrintGetGlobalParams(self, printer):
-        """Writes out GetGlobalParamsFromFlags function."""
-        printer('def GetGlobalParamsFromFlags():')
-        with printer.Indent():
-            printer('"""Return a StandardQueryParameters based on flags."""')
-            printer('result = messages.StandardQueryParameters()')
-
-            for flag_info in self.__global_flags:
-                rhs = 'FLAGS.%s' % flag_info.name
-                if flag_info.conversion:
-                    rhs = flag_info.conversion % rhs
-                printer('if FLAGS[%r].present:', flag_info.name)
-                with printer.Indent():
-                    printer('result.%s = %s', flag_info.name, rhs)
-            printer('return result')
-        printer()
-        printer()
-
-    def __PrintGetClient(self, printer):
-        """Writes out GetClientFromFlags function."""
-        printer('def GetClientFromFlags():')
-        with printer.Indent():
-            printer('"""Return a client object, configured from flags."""')
-            printer('log_request = FLAGS.log_request or '
-                    'FLAGS.log_request_response')
-            printer('log_response = FLAGS.log_response or '
-                    'FLAGS.log_request_response')
-            printer('api_endpoint = apitools_base.NormalizeApiEndpoint('
-                    'FLAGS.api_endpoint)')
-            printer("additional_http_headers = dict(x.split('=', 1) for x in "
-                    "FLAGS.add_header)")
-            printer('credentials_args = {')
-            with printer.Indent('    '):
-                printer("'service_account_json_keyfile': os.path.expanduser("
-                        'FLAGS.service_account_json_keyfile)')
-            printer('}')
-            printer('try:')
-            with printer.Indent():
-                printer('client = client_lib.%s(',
-                        self.__client_info.client_class_name)
-                with printer.Indent(indent='    '):
-                    printer('api_endpoint, log_request=log_request,')
-                    printer('log_response=log_response,')
-                    printer('credentials_args=credentials_args,')
-                    printer('additional_http_headers=additional_http_headers)')
-            printer('except apitools_base.CredentialsError as e:')
-            with printer.Indent():
-                printer("print 'Error creating credentials: %%s' %% e")
-                printer('sys.exit(1)')
-            printer('return client')
-        printer()
-        printer()
-
-    def __PrintCommandDocstring(self, printer, command_info):
-        with printer.CommentContext():
-            for line in textwrap.wrap('"""%s' % command_info.description,
-                                      printer.CalculateWidth()):
-                printer(line)
-            extended_descriptor.PrintIndentedDescriptions(
-                printer, command_info.args, 'Args')
-            extended_descriptor.PrintIndentedDescriptions(
-                printer, command_info.flags, 'Flags')
-            printer('"""')
-
-    def __PrintFlag(self, printer, flag_info):
-        """Writes out given flag definition."""
-        printer('flags.DEFINE_%s(', flag_info.type)
-        with printer.Indent(indent='    '):
-            printer('%r,', flag_info.name)
-            printer('%r,', flag_info.default)
-            if flag_info.type == 'enum':
-                printer('%r,', flag_info.enum_values)
-
-            # TODO(craigcitro): Consider using 'drop_whitespace' elsewhere.
-            description_lines = textwrap.wrap(
-                flag_info.description, 75 - len(printer.indent),
-                drop_whitespace=False)
-            for line in description_lines[:-1]:
-                printer('%r', line)
-            last_line = description_lines[-1] if description_lines else ''
-            printer('%r%s', last_line, ',' if flag_info.fv else ')')
-            if flag_info.fv:
-                printer('flag_values=%s)', flag_info.fv)
-        if flag_info.required:
-            printer('flags.MarkFlagAsRequired(%r)', flag_info.name)
-
-    def __PrintPyShell(self, printer):
-        """Writes out PyShell class."""
-        printer('class PyShell(appcommands.Cmd):')
-        printer()
-        with printer.Indent():
-            printer('def Run(self, _):')
-            with printer.Indent():
-                printer(
-                    '"""Run an interactive python shell with the client."""')
-                printer('client = GetClientFromFlags()')
-                printer('params = GetGlobalParamsFromFlags()')
-                printer('for field in params.all_fields():')
-                with printer.Indent():
-                    printer('value = params.get_assigned_value(field.name)')
-                    printer('if value != field.default:')
-                    with printer.Indent():
-                        printer('client.AddGlobalParam(field.name, value)')
-                printer('banner = """')
-                printer('       == %s interactive console ==' % (
-                    self.__client_info.package))
-                printer('             client: a %s client' %
-                        self.__client_info.package)
-                printer('      apitools_base: base apitools module')
-                printer('     messages: the generated messages module')
-                printer('"""')
-                printer('local_vars = {')
-                with printer.Indent(indent='    '):
-                    printer("'apitools_base': apitools_base,")
-                    printer("'client': client,")
-                    printer("'client_lib': client_lib,")
-                    printer("'messages': messages,")
-                printer('}')
-                printer("if platform.system() == 'Linux':")
-                with printer.Indent():
-                    printer('console = apitools_base_cli.ConsoleWithReadline(')
-                    with printer.Indent(indent='    '):
-                        printer('local_vars, histfile=FLAGS.history_file)')
-                printer('else:')
-                with printer.Indent():
-                    printer('console = code.InteractiveConsole(local_vars)')
-                printer('try:')
-                with printer.Indent():
-                    printer('console.interact(banner)')
-                printer('except SystemExit as e:')
-                with printer.Indent():
-                    printer('return e.code')
-        printer()
-        printer()
-
-    def WriteFile(self, printer):
-        """Write a simple CLI (currently just a stub)."""
-        printer('#!/usr/bin/env python')
-        printer('"""CLI for %s, version %s."""',
-                self.__package, self.__version)
-        printer('# NOTE: This file is autogenerated and should not be edited '
-                'by hand.')
-        # TODO(craigcitro): Add a build stamp, along with some other
-        # information.
-        printer()
-        printer('import code')
-        printer('import os')
-        printer('import platform')
-        printer('import sys')
-        printer()
-        printer('from %s import message_types', self.__protorpc_package)
-        printer('from %s import messages', self.__protorpc_package)
-        printer()
-        appcommands_import = 'from google.apputils import appcommands'
-        printer(appcommands_import)
-
-        flags_import = 'import gflags as flags'
-        printer(flags_import)
-        printer()
-        printer('import %s as apitools_base', self.__base_files_package)
-        printer('from %s import cli as apitools_base_cli',
-                self.__base_files_package)
-        import_prefix = ''
-        printer('%simport %s as client_lib',
-                import_prefix, self.__client_info.client_rule_name)
-        printer('%simport %s as messages',
-                import_prefix, self.__client_info.messages_rule_name)
-        self.__PrintFlagDeclarations(printer)
-        printer()
-        printer()
-        self.__PrintGetGlobalParams(printer)
-        self.__PrintGetClient(printer)
-        self.__PrintPyShell(printer)
-        self.__PrintCommands(printer)
-        printer('def main(_):')
-        with printer.Indent():
-            printer("appcommands.AddCmd('pyshell', PyShell)")
-            for command_info in self.__command_list:
-                printer("appcommands.AddCmd('%s', %s)",
-                        command_info.name, command_info.class_name)
-            printer()
-            printer('apitools_base_cli.SetupLogger()')
-            # TODO(craigcitro): Just call SetDefaultCommand as soon as
-            # another appcommands release happens and this exists
-            # externally.
-            printer("if hasattr(appcommands, 'SetDefaultCommand'):")
-            with printer.Indent():
-                printer("appcommands.SetDefaultCommand('pyshell')")
-        printer()
-        printer()
-        printer('run_main = apitools_base_cli.run_main')
-        printer()
-        printer("if __name__ == '__main__':")
-        with printer.Indent():
-            printer('appcommands.Run()')
-
-    def __PrintCommands(self, printer):
-        """Print all commands in this registry using printer."""
-        for command_info in self.__command_list:
-            arg_list = [arg_info.name for arg_info in command_info.args]
-            printer(
-                'class %s(apitools_base_cli.NewCmd):', command_info.class_name)
-            with printer.Indent():
-                printer('"""Command wrapping %s."""',
-                        command_info.client_method_path)
-                printer()
-                printer('usage = """%s%s%s"""',
-                        command_info.name,
-                        ' ' if arg_list else '',
-                        ' '.join('<%s>' % argname for argname in arg_list))
-                printer()
-                printer('def __init__(self, name, fv):')
-                with printer.Indent():
-                    printer('super(%s, self).__init__(name, fv)',
-                            command_info.class_name)
-                    for flag in command_info.flags:
-                        self.__PrintFlag(printer, flag)
-                printer()
-                printer('def RunWithArgs(%s):', ', '.join(['self'] + arg_list))
-                with printer.Indent():
-                    self.__PrintCommandDocstring(printer, command_info)
-                    printer('client = GetClientFromFlags()')
-                    printer('global_params = GetGlobalParamsFromFlags()')
-                    printer(
-                        'request = messages.%s(', command_info.request_type)
-                    with printer.Indent(indent='    '):
-                        for arg in command_info.args:
-                            rhs = arg.name
-                            if arg.conversion:
-                                rhs = arg.conversion % arg.name
-                            printer('%s=%s,', arg.name, rhs)
-                        printer(')')
-                    for flag_info in command_info.flags:
-                        if flag_info.special:
-                            continue
-                        rhs = 'FLAGS.%s' % flag_info.name
-                        if flag_info.conversion:
-                            rhs = flag_info.conversion % rhs
-                        printer('if FLAGS[%r].present:', flag_info.name)
-                        with printer.Indent():
-                            printer('request.%s = %s', flag_info.name, rhs)
-                    call_args = ['request', 'global_params=global_params']
-                    if command_info.has_upload:
-                        call_args.append('upload=upload')
-                        printer('upload = None')
-                        printer('if FLAGS.upload_filename:')
-                        with printer.Indent():
-                            printer('upload = apitools_base.Upload.FromFile(')
-                            printer('    FLAGS.upload_filename, '
-                                    'FLAGS.upload_mime_type,')
-                            printer('    progress_callback='
-                                    'apitools_base.UploadProgressPrinter,')
-                            printer('    finish_callback='
-                                    'apitools_base.UploadCompletePrinter)')
-                    if command_info.has_download:
-                        call_args.append('download=download')
-                        printer('download = None')
-                        printer('if FLAGS.download_filename:')
-                        with printer.Indent():
-                            printer('download = apitools_base.Download.'
-                                    'FromFile(FLAGS.download_filename, '
-                                    'overwrite=FLAGS.overwrite,')
-                            printer('    progress_callback='
-                                    'apitools_base.DownloadProgressPrinter,')
-                            printer('    finish_callback='
-                                    'apitools_base.DownloadCompletePrinter)')
-                    printer(
-                        'result = client.%s(', command_info.client_method_path)
-                    with printer.Indent(indent='    '):
-                        printer('%s)', ', '.join(call_args))
-                    printer('print apitools_base_cli.FormatOutput(result)')
-            printer()
-            printer()
diff --git a/apitools/gen/extended_descriptor.py b/apitools/gen/extended_descriptor.py
index c5d9909..52b34a1 100644
--- a/apitools/gen/extended_descriptor.py
+++ b/apitools/gen/extended_descriptor.py
@@ -36,7 +36,7 @@
 from apitools.base.protorpclite import descriptor as protorpc_descriptor
 from apitools.base.protorpclite import message_types
 from apitools.base.protorpclite import messages
-import apitools.base.py as apitools_base
+from apitools.base.py import extra_types
 
 
 class ExtendedEnumValueDescriptor(messages.Message):
@@ -164,11 +164,9 @@
     proto_printer.PrintPreamble(package, version, file_descriptor)
     _PrintEnums(proto_printer, file_descriptor.enum_types)
     _PrintMessages(proto_printer, file_descriptor.message_types)
-    custom_json_mappings = _FetchCustomMappings(
-        file_descriptor.enum_types, file_descriptor.package)
+    custom_json_mappings = _FetchCustomMappings(file_descriptor.enum_types)
     custom_json_mappings.extend(
-        _FetchCustomMappings(
-            file_descriptor.message_types, file_descriptor.package))
+        _FetchCustomMappings(file_descriptor.message_types))
     for mapping in custom_json_mappings:
         proto_printer.PrintCustomJsonMapping(mapping)
 
@@ -200,31 +198,30 @@
                         printer(line)
 
 
-def _FetchCustomMappings(descriptor_ls, package):
+def _FetchCustomMappings(descriptor_ls):
     """Find and return all custom mappings for descriptors in descriptor_ls."""
     custom_mappings = []
     for descriptor in descriptor_ls:
         if isinstance(descriptor, ExtendedEnumDescriptor):
             custom_mappings.extend(
-                _FormatCustomJsonMapping('Enum', m, descriptor, package)
+                _FormatCustomJsonMapping('Enum', m, descriptor)
                 for m in descriptor.enum_mappings)
         elif isinstance(descriptor, ExtendedMessageDescriptor):
             custom_mappings.extend(
-                _FormatCustomJsonMapping('Field', m, descriptor, package)
+                _FormatCustomJsonMapping('Field', m, descriptor)
                 for m in descriptor.field_mappings)
             custom_mappings.extend(
-                _FetchCustomMappings(descriptor.enum_types, package))
+                _FetchCustomMappings(descriptor.enum_types))
             custom_mappings.extend(
-                _FetchCustomMappings(descriptor.message_types, package))
+                _FetchCustomMappings(descriptor.message_types))
     return custom_mappings
 
 
-def _FormatCustomJsonMapping(mapping_type, mapping, descriptor, package):
+def _FormatCustomJsonMapping(mapping_type, mapping, descriptor):
     return '\n'.join((
         'encoding.AddCustomJson%sMapping(' % mapping_type,
-        "    %s, '%s', '%s'," % (descriptor.full_name, mapping.python_name,
+        "    %s, '%s', '%s')" % (descriptor.full_name, mapping.python_name,
                                  mapping.json_name),
-        '    package=%r)' % package,
     ))
 
 
@@ -376,7 +373,7 @@
 
     def __PrintEnumDocstringLines(self, enum_type):
         description = enum_type.description or '%s enum type.' % enum_type.name
-        for line in textwrap.wrap('"""%s' % description,
+        for line in textwrap.wrap('r"""%s' % description,
                                   self.__printer.CalculateWidth()):
             self.__printer(line)
         PrintIndentedDescriptions(self.__printer, enum_type.values, 'Values')
@@ -436,9 +433,9 @@
             if short_description:
                 # Note that we use explicit string interpolation here since
                 # we're in comment context.
-                self.__printer('"""%s"""' % description)
+                self.__printer('r"""%s"""' % description)
                 return
-            for line in textwrap.wrap('"""%s' % description,
+            for line in textwrap.wrap('r"""%s' % description,
                                       self.__printer.CalculateWidth()):
                 self.__printer(line)
 
@@ -510,7 +507,7 @@
             field_type = message_field
         elif field.type_name == 'extra_types.DateField':
             printed_field_info['module'] = 'extra_types'
-            field_type = apitools_base.DateField
+            field_type = extra_types.DateField
         else:
             field_type = messages.Field.lookup_field_type_by_variant(
                 field.variant)
diff --git a/apitools/gen/gen_client.py b/apitools/gen/gen_client.py
index c36fbc4..f842227 100644
--- a/apitools/gen/gen_client.py
+++ b/apitools/gen/gen_client.py
@@ -18,6 +18,7 @@
 
 import argparse
 import contextlib
+import io
 import json
 import logging
 import os
@@ -30,7 +31,7 @@
 
 
 def _CopyLocalFile(filename):
-    with contextlib.closing(open(filename, 'w')) as out:
+    with contextlib.closing(io.open(filename, 'w')) as out:
         src_data = pkgutil.get_data(
             'apitools.base.py', filename)
         if src_data is None:
@@ -49,8 +50,8 @@
                 'Could not fetch discovery doc')
 
     infile = os.path.expanduser(args.infile) or '/dev/stdin'
-    with open(infile) as f:
-        return json.load(f)
+    with io.open(infile, encoding='utf8') as f:
+        return json.loads(util.ReplaceHomoglyphs(f.read()))
 
 
 def _GetCodegenFromFlags(args):
@@ -63,8 +64,8 @@
 
     if args.client_json:
         try:
-            with open(args.client_json) as client_json:
-                f = json.loads(client_json.read())
+            with io.open(args.client_json, encoding='utf8') as client_json:
+                f = json.loads(util.ReplaceHomoglyphs(client_json.read()))
                 web = f.get('installed', f.get('web', {}))
                 client_id = web.get('client_id')
                 client_secret = web.get('client_secret')
@@ -98,7 +99,6 @@
         discovery_doc, client_info, names, args.root_package, outdir,
         base_package=args.base_package,
         protorpc_package=args.protorpc_package,
-        generate_cli=args.generate_cli,
         init_wildcards_file=(args.init_file == 'wildcards'),
         use_proto2=args.experimental_proto2_output,
         unelidable_request_methods=args.unelidable_request_methods,
@@ -108,23 +108,21 @@
 # TODO(craigcitro): Delete this if we don't need this functionality.
 def _WriteBaseFiles(codegen):
     with util.Chdir(codegen.outdir):
-        _CopyLocalFile('app2.py')
         _CopyLocalFile('base_api.py')
-        _CopyLocalFile('base_cli.py')
         _CopyLocalFile('credentials_lib.py')
         _CopyLocalFile('exceptions.py')
 
 
 def _WriteIntermediateInit(codegen):
-    with open('__init__.py', 'w') as out:
+    with io.open('__init__.py', 'w') as out:
         codegen.WriteIntermediateInit(out)
 
 
 def _WriteProtoFiles(codegen):
     with util.Chdir(codegen.outdir):
-        with open(codegen.client_info.messages_proto_file_name, 'w') as out:
+        with io.open(codegen.client_info.messages_proto_file_name, 'w') as out:
             codegen.WriteMessagesProtoFile(out)
-        with open(codegen.client_info.services_proto_file_name, 'w') as out:
+        with io.open(codegen.client_info.services_proto_file_name, 'w') as out:
             codegen.WriteServicesProtoFile(out)
 
 
@@ -132,24 +130,20 @@
     if codegen.use_proto2:
         _WriteProtoFiles(codegen)
     with util.Chdir(codegen.outdir):
-        with open(codegen.client_info.messages_file_name, 'w') as out:
+        with io.open(codegen.client_info.messages_file_name, 'w') as out:
             codegen.WriteMessagesFile(out)
-        with open(codegen.client_info.client_file_name, 'w') as out:
+        with io.open(codegen.client_info.client_file_name, 'w') as out:
             codegen.WriteClientLibrary(out)
-        if args.generate_cli:
-            with open(codegen.client_info.cli_file_name, 'w') as out:
-                codegen.WriteCli(out)
-            os.chmod(codegen.client_info.cli_file_name, 0o755)
 
 
 def _WriteInit(codegen):
     with util.Chdir(codegen.outdir):
-        with open('__init__.py', 'w') as out:
+        with io.open('__init__.py', 'w') as out:
             codegen.WriteInit(out)
 
 
 def _WriteSetupPy(codegen):
-    with open('setup.py', 'w') as out:
+    with io.open('setup.py', 'w') as out:
         codegen.WriteSetupPy(out)
 
 
@@ -176,7 +170,6 @@
     args.outdir = os.path.join(
         args.outdir, 'apitools/clients/%s' % package)
     args.root_package = 'apitools.clients.%s' % package
-    args.generate_cli = False
     codegen = _GetCodegenFromFlags(args)
     if codegen is None:
         logging.error('Failed to create codegen, exiting.')
@@ -287,11 +280,10 @@
 
     parser.add_argument(
         '--generate_cli', dest='generate_cli', action='store_true',
-        help='If specified (default), a CLI is also generated.')
+        help='Ignored.')
     parser.add_argument(
         '--nogenerate_cli', dest='generate_cli', action='store_false',
-        help='CLI will not be generated.')
-    parser.set_defaults(generate_cli=True)
+        help='Ignored.')
 
     parser.add_argument(
         '--init-file',
@@ -346,5 +338,6 @@
     args = parser.parse_args(argv[1:])
     return args.func(args) or 0
 
+
 if __name__ == '__main__':
     sys.exit(main())
diff --git a/apitools/gen/gen_client_lib.py b/apitools/gen/gen_client_lib.py
index b910f0f..1796762 100644
--- a/apitools/gen/gen_client_lib.py
+++ b/apitools/gen/gen_client_lib.py
@@ -22,7 +22,6 @@
 
 import datetime
 
-from apitools.gen import command_registry
 from apitools.gen import message_registry
 from apitools.gen import service_registry
 from apitools.gen import util
@@ -63,8 +62,7 @@
     """Code generator for a given discovery document."""
 
     def __init__(self, discovery_doc, client_info, names, root_package, outdir,
-                 base_package, protorpc_package, generate_cli=False,
-                 init_wildcards_file=True,
+                 base_package, protorpc_package, init_wildcards_file=True,
                  use_proto2=False, unelidable_request_methods=None,
                  apitools_version=''):
         self.__discovery_doc = discovery_doc
@@ -76,7 +74,6 @@
         self.__package = self.__client_info.package
         self.__version = self.__client_info.version
         self.__revision = discovery_doc.get('revision', '1')
-        self.__generate_cli = generate_cli
         self.__init_wildcards_file = init_wildcards_file
         self.__root_package = root_package
         self.__base_files_package = base_package
@@ -104,19 +101,9 @@
         # fields from MessageFields to EnumFields.
         self.__message_registry.FixupMessageFields()
 
-        self.__command_registry = command_registry.CommandRegistry(
-            self.__package, self.__version, self.__client_info,
-            self.__message_registry, self.__root_package,
-            self.__base_files_package, self.__protorpc_package,
-            self.__names)
-        self.__command_registry.AddGlobalParameters(
-            self.__message_registry.LookupDescriptorOrDie(
-                'StandardQueryParameters'))
-
         self.__services_registry = service_registry.ServiceRegistry(
             self.__client_info,
             self.__message_registry,
-            self.__command_registry,
             self.__names,
             self.__root_package,
             self.__base_files_package,
@@ -189,9 +176,6 @@
                 import_prefix = ''
             else:
                 import_prefix = '%s.' % self.__root_package
-            if self.__generate_cli:
-                printer('from %s%s import *',
-                        import_prefix, self.__client_info.cli_rule_name)
             printer('from %s%s import *',
                     import_prefix, self.__client_info.client_rule_name)
             printer('from %s%s import *',
@@ -281,6 +265,3 @@
 
     def WriteClientLibrary(self, out):
         self.__services_registry.WriteFile(self._GetPrinter(out))
-
-    def WriteCli(self, out):
-        self.__command_registry.WriteFile(self._GetPrinter(out))
diff --git a/apitools/gen/gen_client_test.py b/apitools/gen/gen_client_test.py
index 3be3e7a..6c4e9b1 100644
--- a/apitools/gen/gen_client_test.py
+++ b/apitools/gen/gen_client_test.py
@@ -32,7 +32,6 @@
         return f.read()
 
 
-@test_utils.RunOnlyOnPython27
 class ClientGenCliTest(unittest2.TestCase):
 
     def testHelp_NotEnoughArguments(self):
@@ -47,7 +46,6 @@
         with test_utils.TempDir() as tmp_dir_path:
             gen_client.main([
                 gen_client.__file__,
-                '--generate_cli',
                 '--init-file', 'none',
                 '--infile', GetTestDataPath('dns', 'dns_v1.json'),
                 '--outdir', tmp_dir_path,
@@ -56,7 +54,6 @@
                 'client'
             ])
             expected_files = (
-                set(['dns_v1.py']) |  # CLI files
                 set(['dns_v1_client.py', 'dns_v1_messages.py']))
             self.assertEquals(expected_files, set(os.listdir(tmp_dir_path)))
 
@@ -64,7 +61,6 @@
         with test_utils.TempDir() as tmp_dir_path:
             gen_client.main([
                 gen_client.__file__,
-                '--generate_cli',
                 '--init-file', 'empty',
                 '--infile', GetTestDataPath('dns', 'dns_v1.json'),
                 '--outdir', tmp_dir_path,
@@ -73,7 +69,6 @@
                 'client'
             ])
             expected_files = (
-                set(['dns_v1.py']) |  # CLI files
                 set(['dns_v1_client.py', 'dns_v1_messages.py', '__init__.py']))
             self.assertEquals(expected_files, set(os.listdir(tmp_dir_path)))
             init_file = _GetContent(os.path.join(tmp_dir_path, '__init__.py'))
@@ -88,7 +83,6 @@
         with test_utils.TempDir() as tmp_dir_path:
             gen_client.main([
                 gen_client.__file__,
-                '--nogenerate_cli',
                 '--infile', GetTestDataPath('dns', 'dns_v1.json'),
                 '--outdir', tmp_dir_path,
                 '--overwrite',
@@ -104,7 +98,6 @@
         with test_utils.TempDir() as tmp_dir_path:
             gen_client.main([
                 gen_client.__file__,
-                '--nogenerate_cli',
                 '--infile', GetTestDataPath('dns', 'dns_v1.json'),
                 '--outdir', tmp_dir_path,
                 '--overwrite',
@@ -120,7 +113,6 @@
         with test_utils.TempDir() as tmp_dir_path:
             gen_client.main([
                 gen_client.__file__,
-                '--nogenerate_cli',
                 '--infile', GetTestDataPath('dns', 'dns_v1.json'),
                 '--outdir', tmp_dir_path,
                 '--overwrite',
@@ -135,7 +127,6 @@
         with test_utils.TempDir() as tmp_dir_path:
             gen_client.main([
                 gen_client.__file__,
-                '--nogenerate_cli',
                 '--infile', GetTestDataPath('dns', 'dns_v1.json'),
                 '--outdir', tmp_dir_path,
                 '--overwrite',
diff --git a/apitools/gen/message_registry.py b/apitools/gen/message_registry.py
index 4f004de..0ab44c1 100644
--- a/apitools/gen/message_registry.py
+++ b/apitools/gen/message_registry.py
@@ -169,7 +169,7 @@
     def LookupDescriptorOrDie(self, name):
         message_descriptor = self.LookupDescriptor(name)
         if message_descriptor is None:
-            raise ValueError('No message descriptor named "%s"', name)
+            raise ValueError('No message descriptor named "%s"' % name)
         return message_descriptor
 
     def __GetDescriptor(self, name):
@@ -262,7 +262,7 @@
             self.__DeclareMessageAlias(schema, 'extra_types.JsonValue')
             return
         if schema.get('type') != 'object':
-            raise ValueError('Cannot create message descriptors for type %s',
+            raise ValueError('Cannot create message descriptors for type %s' %
                              schema.get('type'))
         message = extended_descriptor.ExtendedMessageDescriptor()
         message.name = self.__names.ClassName(schema['id'])
diff --git a/apitools/gen/service_registry.py b/apitools/gen/service_registry.py
index 9f71592..e47b050 100644
--- a/apitools/gen/service_registry.py
+++ b/apitools/gen/service_registry.py
@@ -34,7 +34,7 @@
 
     """Registry for service types."""
 
-    def __init__(self, client_info, message_registry, command_registry,
+    def __init__(self, client_info, message_registry,
                  names, root_package, base_files_package,
                  unelidable_request_methods):
         self.__client_info = client_info
@@ -42,7 +42,6 @@
         self.__names = names
         self.__service_method_info_map = collections.OrderedDict()
         self.__message_registry = message_registry
-        self.__command_registry = command_registry
         self.__root_package = root_package
         self.__base_files_package = base_files_package
         self.__unelidable_request_methods = unelidable_request_methods
@@ -71,7 +70,7 @@
         else:
             description = '%s method for the %s service.' % (method_name, name)
         with printer.CommentContext():
-            printer('"""%s' % description)
+            printer('r"""%s' % description)
         printer()
         printer('Args:')
         printer('  request: (%s) input message', method_info.request_type_name)
@@ -238,7 +237,8 @@
                 printer('get_credentials=True, http=None, model=None,')
                 printer('log_request=False, log_response=False,')
                 printer('credentials_args=None, default_global_params=None,')
-                printer('additional_http_headers=None):')
+                printer('additional_http_headers=None, '
+                        'response_encoding=None):')
             with printer.Indent():
                 printer('"""Create a new %s handle."""', client_info.package)
                 printer('url = url or self.BASE_URL')
@@ -251,7 +251,8 @@
                         'log_response=log_response,')
                 printer('    credentials_args=credentials_args,')
                 printer('    default_global_params=default_global_params,')
-                printer('    additional_http_headers=additional_http_headers)')
+                printer('    additional_http_headers=additional_http_headers,')
+                printer('    response_encoding=response_encoding)')
                 for name in self.__service_method_info_map.keys():
                     printer('self.%s = self.%s(self)',
                             name, self.__GetServiceClassName(name))
@@ -442,6 +443,7 @@
 
     def AddServiceFromResource(self, service_name, methods):
         """Add a new service named service_name with the given methods."""
+        service_name = self.__names.CleanName(service_name)
         method_descriptions = methods.get('methods', {})
         method_info_map = collections.OrderedDict()
         items = sorted(method_descriptions.items())
@@ -471,9 +473,6 @@
 
             method_info_map[method_name] = self.__ComputeMethodInfo(
                 method_description, request, response, request_field)
-            self.__command_registry.AddCommandForMethod(
-                service_name, method_name, method_info_map[method_name],
-                request, response)
 
         nested_services = methods.get('resources', {})
         services = sorted(nested_services.items())
diff --git a/apitools/gen/test_utils.py b/apitools/gen/test_utils.py
index 59eea51..484dcbc 100644
--- a/apitools/gen/test_utils.py
+++ b/apitools/gen/test_utils.py
@@ -17,17 +17,14 @@
 
 import contextlib
 import os
-import tempfile
 import shutil
 import sys
+import tempfile
 
 import six
 import unittest2
 
 
-RunOnlyOnPython27 = unittest2.skipUnless(
-    sys.version_info[:2] == (2, 7), 'Only runs in Python 2.7')
-
 SkipOnWindows = unittest2.skipIf(
     os.name == 'nt', 'Does not run on windows')
 
diff --git a/apitools/gen/util.py b/apitools/gen/util.py
index 146b452..680d84a 100644
--- a/apitools/gen/util.py
+++ b/apitools/gen/util.py
@@ -16,14 +16,17 @@
 
 """Assorted utilities shared between parts of apitools."""
 from __future__ import print_function
+from __future__ import unicode_literals
 
 import collections
 import contextlib
+import gzip
 import json
 import keyword
 import logging
 import os
 import re
+import tempfile
 
 import six
 from six.moves import urllib_parse
@@ -223,14 +226,6 @@
         return self.package
 
     @property
-    def cli_rule_name(self):
-        return '%s_%s' % (self.package, self.version)
-
-    @property
-    def cli_file_name(self):
-        return '%s.py' % self.cli_rule_name
-
-    @property
     def client_rule_name(self):
         return '%s_%s_client' % (self.package, self.version)
 
@@ -259,10 +254,51 @@
         return '%s.proto' % self.services_rule_name
 
 
+def ReplaceHomoglyphs(s):
+    """Returns s with unicode homoglyphs replaced by ascii equivalents."""
+    homoglyphs = {
+        '\xa0': ' ',  # &nbsp; ?
+        '\u00e3': '',  # TODO(gsfowler) drop after .proto spurious char elided
+        '\u00a0': ' ',  # &nbsp; ?
+        '\u00a9': '(C)',  # COPYRIGHT SIGN (would you believe "asciiglyph"?)
+        '\u00ae': '(R)',  # REGISTERED SIGN (would you believe "asciiglyph"?)
+        '\u2014': '-',  # EM DASH
+        '\u2018': "'",  # LEFT SINGLE QUOTATION MARK
+        '\u2019': "'",  # RIGHT SINGLE QUOTATION MARK
+        '\u201c': '"',  # LEFT DOUBLE QUOTATION MARK
+        '\u201d': '"',  # RIGHT DOUBLE QUOTATION MARK
+        '\u2026': '...',  # HORIZONTAL ELLIPSIS
+        '\u2e3a': '-',  # TWO-EM DASH
+    }
+
+    def _ReplaceOne(c):
+        """Returns the homoglyph or escaped replacement for c."""
+        equiv = homoglyphs.get(c)
+        if equiv is not None:
+            return equiv
+        try:
+            c.encode('ascii')
+            return c
+        except UnicodeError:
+            pass
+        try:
+            return c.encode('unicode-escape').decode('ascii')
+        except UnicodeError:
+            return '?'
+
+    return ''.join([_ReplaceOne(c) for c in s])
+
+
 def CleanDescription(description):
     """Return a version of description safe for printing in a docstring."""
     if not isinstance(description, six.string_types):
         return description
+    if six.PY3:
+        # https://docs.python.org/3/reference/lexical_analysis.html#index-18
+        description = description.replace('\\N', '\\\\N')
+        description = description.replace('\\u', '\\\\u')
+        description = description.replace('\\U', '\\\\U')
+    description = ReplaceHomoglyphs(description)
     return description.replace('"""', '" " "')
 
 
@@ -306,8 +342,12 @@
                 line = (args[0] % args[1:]).rstrip()
             else:
                 line = args[0].rstrip()
-            line = line.encode('ascii', 'backslashreplace')
-            print('%s%s' % (self.__indent, line), file=self.__out)
+            line = ReplaceHomoglyphs(line)
+            try:
+                print('%s%s' % (self.__indent, line), file=self.__out)
+            except UnicodeEncodeError:
+                line = line.encode('ascii', 'backslashreplace').decode('ascii')
+                print('%s%s' % (self.__indent, line), file=self.__out)
         else:
             print('', file=self.__out)
 
@@ -327,6 +367,30 @@
     ]
 
 
+def _Gunzip(gzipped_content):
+    """Returns gunzipped content from gzipped contents."""
+    f = tempfile.NamedTemporaryFile(suffix='gz', mode='w+b', delete=False)
+    try:
+        f.write(gzipped_content)
+        f.close()  # force file synchronization
+        with gzip.open(f.name, 'rb') as h:
+            decompressed_content = h.read()
+        return decompressed_content
+    finally:
+        os.unlink(f.name)
+
+
+def _GetURLContent(url):
+    """Download and return the content of URL."""
+    response = urllib_request.urlopen(url)
+    encoding = response.info().get('Content-Encoding')
+    if encoding == 'gzip':
+        content = _Gunzip(response.read())
+    else:
+        content = response.read()
+    return content
+
+
 def FetchDiscoveryDoc(discovery_url, retries=5):
     """Fetch the discovery document at the given url."""
     discovery_urls = _NormalizeDiscoveryUrls(discovery_url)
@@ -335,7 +399,10 @@
     for url in discovery_urls:
         for _ in range(retries):
             try:
-                discovery_doc = json.loads(urllib_request.urlopen(url).read())
+                content = _GetURLContent(url)
+                if isinstance(content, bytes):
+                    content = content.decode('utf8')
+                discovery_doc = json.loads(content)
                 break
             except (urllib_error.HTTPError, urllib_error.URLError) as e:
                 logging.info(
diff --git a/apitools/gen/util_test.py b/apitools/gen/util_test.py
index 7cb0739..7668b53 100644
--- a/apitools/gen/util_test.py
+++ b/apitools/gen/util_test.py
@@ -1,3 +1,5 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
 #
 # Copyright 2015 Google Inc.
 #
@@ -14,9 +16,15 @@
 # limitations under the License.
 
 """Tests for util."""
+import codecs
+import gzip
+import os
+import six.moves.urllib.request as urllib_request
+import tempfile
 import unittest2
 
 from apitools.gen import util
+from mock import patch
 
 
 class NormalizeVersionTest(unittest2.TestCase):
@@ -37,3 +45,55 @@
     def testNormalizeEnumName(self):
         names = util.Names([''])
         self.assertEqual('_0', names.NormalizeEnumName('0'))
+
+
+class MockRequestResponse():
+    """Mocks the behavior of urllib.response."""
+
+    class MockRequestEncoding():
+        def __init__(self, encoding):
+            self.encoding = encoding
+
+        def get(self, _):
+            return self.encoding
+
+    def __init__(self, content, encoding):
+        self.content = content
+        self.encoding = MockRequestResponse.MockRequestEncoding(encoding)
+
+    def info(self):
+        return self.encoding
+
+    def read(self):
+        return self.content
+
+
+def _Gzip(raw_content):
+    """Returns gzipped content from any content."""
+    f = tempfile.NamedTemporaryFile(suffix='gz', mode='wb', delete=False)
+    f.close()
+    try:
+        with gzip.open(f.name, 'wb') as h:
+            h.write(raw_content)
+        with open(f.name, 'rb') as h:
+            return h.read()
+    finally:
+        os.unlink(f.name)
+
+
+class GetURLContentTest(unittest2.TestCase):
+
+    def testUnspecifiedContentEncoding(self):
+        data = 'regular non-gzipped content'
+        with patch.object(urllib_request, 'urlopen',
+                          return_value=MockRequestResponse(data, '')):
+            self.assertEqual(data, util._GetURLContent('unused_url_parameter'))
+
+    def testGZippedContent(self):
+        data = u'¿Hola qué tal?'
+        compressed_data = _Gzip(data.encode('utf-8'))
+        with patch.object(urllib_request, 'urlopen',
+                          return_value=MockRequestResponse(
+                              compressed_data, 'gzip')):
+            self.assertEqual(data, util._GetURLContent(
+                'unused_url_parameter').decode('utf-8'))
diff --git a/apitools/scripts/oauth2l.py b/apitools/scripts/oauth2l.py
deleted file mode 100644
index cddba0a..0000000
--- a/apitools/scripts/oauth2l.py
+++ /dev/null
@@ -1,339 +0,0 @@
-#
-# Copyright 2015 Google Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""Command-line utility for fetching/inspecting credentials.
-
-oauth2l (pronounced "oauthtool") is a small utility for fetching
-credentials, or inspecting existing credentials. Here we demonstrate
-some sample use:
-
-    $ oauth2l fetch userinfo.email bigquery compute
-    Fetched credentials of type:
-      oauth2client.client.OAuth2Credentials
-    Access token:
-      ya29.abcdefghijklmnopqrstuvwxyz123yessirree
-    $ oauth2l header userinfo.email
-    Authorization: Bearer ya29.zyxwvutsrqpnmolkjihgfedcba
-    $ oauth2l validate thisisnotatoken
-    <exit status: 1>
-    $ oauth2l validate ya29.zyxwvutsrqpnmolkjihgfedcba
-    $ oauth2l scopes ya29.abcdefghijklmnopqrstuvwxyz123yessirree
-    https://www.googleapis.com/auth/bigquery
-    https://www.googleapis.com/auth/compute
-    https://www.googleapis.com/auth/userinfo.email
-
-The `header` command is designed to be easy to use with `curl`:
-
-    $ curl -H "$(oauth2l header bigquery)" \\
-      'https://www.googleapis.com/bigquery/v2/projects'
-    ... lists all projects ...
-
-The token can also be printed in other formats, for easy chaining
-into other programs:
-
-    $ oauth2l fetch -f json_compact userinfo.email
-    <one-line JSON object with credential information>
-    $ oauth2l fetch -f bare drive
-    ya29.suchT0kenManyCredentialsW0Wokyougetthepoint
-
-"""
-
-from __future__ import print_function
-
-import argparse
-import json
-import logging
-import os
-import pkgutil
-import sys
-import textwrap
-
-import oauth2client.client
-from six.moves import http_client
-
-import apitools.base.py as apitools_base
-
-# We could use a generated client here, but it's used for precisely
-# one URL, with one parameter and no worries about URL encoding. Let's
-# go with simple.
-_OAUTH2_TOKENINFO_TEMPLATE = (
-    'https://www.googleapis.com/oauth2/v2/tokeninfo'
-    '?access_token={access_token}'
-)
-
-
-def GetDefaultClientInfo():
-    client_secrets_json = pkgutil.get_data(
-        'apitools.data', 'apitools_client_secrets.json').decode('utf8')
-    client_secrets = json.loads(client_secrets_json)['installed']
-    return {
-        'client_id': client_secrets['client_id'],
-        'client_secret': client_secrets['client_secret'],
-        'user_agent': 'apitools/0.2 oauth2l/0.1',
-    }
-
-
-def GetClientInfoFromFlags(client_secrets):
-    """Fetch client info from args."""
-    if client_secrets:
-        client_secrets_path = os.path.expanduser(client_secrets)
-        if not os.path.exists(client_secrets_path):
-            raise ValueError(
-                'Cannot find file: {0}'.format(client_secrets))
-        with open(client_secrets_path) as client_secrets_file:
-            client_secrets = json.load(client_secrets_file)
-        if 'installed' not in client_secrets:
-            raise ValueError('Provided client ID must be for an installed app')
-        client_secrets = client_secrets['installed']
-        return {
-            'client_id': client_secrets['client_id'],
-            'client_secret': client_secrets['client_secret'],
-            'user_agent': 'apitools/0.2 oauth2l/0.1',
-        }
-    else:
-        return GetDefaultClientInfo()
-
-
-def _ExpandScopes(scopes):
-    scope_prefix = 'https://www.googleapis.com/auth/'
-    return [s if s.startswith('https://') else scope_prefix + s
-            for s in scopes]
-
-
-def _PrettyJson(data):
-    return json.dumps(data, sort_keys=True, indent=4, separators=(',', ': '))
-
-
-def _CompactJson(data):
-    return json.dumps(data, sort_keys=True, separators=(',', ':'))
-
-
-def _AsText(text_or_bytes):
-    if isinstance(text_or_bytes, bytes):
-        return text_or_bytes.decode('utf8')
-    return text_or_bytes
-
-
-def _Format(fmt, credentials):
-    """Format credentials according to fmt."""
-    if fmt == 'bare':
-        return credentials.access_token
-    elif fmt == 'header':
-        return 'Authorization: Bearer %s' % credentials.access_token
-    elif fmt == 'json':
-        return _PrettyJson(json.loads(_AsText(credentials.to_json())))
-    elif fmt == 'json_compact':
-        return _CompactJson(json.loads(_AsText(credentials.to_json())))
-    elif fmt == 'pretty':
-        format_str = textwrap.dedent('\n'.join([
-            'Fetched credentials of type:',
-            '  {credentials_type.__module__}.{credentials_type.__name__}',
-            'Access token:',
-            '  {credentials.access_token}',
-        ]))
-        return format_str.format(credentials=credentials,
-                                 credentials_type=type(credentials))
-    raise ValueError('Unknown format: {0}'.format(fmt))
-
-_FORMATS = set(('bare', 'header', 'json', 'json_compact', 'pretty'))
-
-
-def _GetTokenScopes(access_token):
-    """Return the list of valid scopes for the given token as a list."""
-    url = _OAUTH2_TOKENINFO_TEMPLATE.format(access_token=access_token)
-    response = apitools_base.MakeRequest(
-        apitools_base.GetHttp(), apitools_base.Request(url))
-    if response.status_code not in [http_client.OK, http_client.BAD_REQUEST]:
-        raise apitools_base.HttpError.FromResponse(response)
-    if response.status_code == http_client.BAD_REQUEST:
-        return []
-    return json.loads(_AsText(response.content))['scope'].split(' ')
-
-
-def _ValidateToken(access_token):
-    """Return True iff the provided access token is valid."""
-    return bool(_GetTokenScopes(access_token))
-
-
-def _FetchCredentials(args, client_info=None, credentials_filename=None):
-    """Fetch a credential for the given client_info and scopes."""
-    client_info = client_info or GetClientInfoFromFlags(args.client_secrets)
-    scopes = _ExpandScopes(args.scope)
-    if not scopes:
-        raise ValueError('No scopes provided')
-    credentials_filename = credentials_filename or args.credentials_filename
-    # TODO(craigcitro): Remove this logging nonsense once we quiet the
-    # spurious logging in oauth2client.
-    old_level = logging.getLogger().level
-    logging.getLogger().setLevel(logging.ERROR)
-    credentials = apitools_base.GetCredentials(
-        'oauth2l', scopes, credentials_filename=credentials_filename,
-        service_account_json_keyfile=args.service_account_json_keyfile,
-        oauth2client_args='', **client_info)
-    logging.getLogger().setLevel(old_level)
-    if not _ValidateToken(credentials.access_token):
-        credentials.refresh(apitools_base.GetHttp())
-    return credentials
-
-
-def _Email(args):
-    """Print the email address for this token, if possible."""
-    userinfo = apitools_base.GetUserinfo(
-        oauth2client.client.AccessTokenCredentials(args.access_token,
-                                                   'oauth2l/1.0'))
-    user_email = userinfo.get('email')
-    if user_email:
-        print(user_email)
-
-
-def _Fetch(args):
-    """Fetch a valid access token and display it."""
-    credentials = _FetchCredentials(args)
-    print(_Format(args.credentials_format.lower(), credentials))
-
-
-def _Header(args):
-    """Fetch an access token and display it formatted as an HTTP header."""
-    print(_Format('header', _FetchCredentials(args)))
-
-
-def _Scopes(args):
-    """Print the list of scopes for a valid token."""
-    scopes = _GetTokenScopes(args.access_token)
-    if not scopes:
-        return 1
-    for scope in sorted(scopes):
-        print(scope)
-
-
-def _Userinfo(args):
-    """Print the userinfo for this token, if possible."""
-    userinfo = apitools_base.GetUserinfo(
-        oauth2client.client.AccessTokenCredentials(args.access_token,
-                                                   'oauth2l/1.0'))
-    if args.format == 'json':
-        print(_PrettyJson(userinfo))
-    else:
-        print(_CompactJson(userinfo))
-
-
-def _Validate(args):
-    """Validate an access token. Exits with 0 if valid, 1 otherwise."""
-    return 1 - (_ValidateToken(args.access_token))
-
-
-def _GetParser():
-    """Returns argparse argument parser."""
-    shared_flags = argparse.ArgumentParser(add_help=False)
-    shared_flags.add_argument(
-        '--client_secrets',
-        default='',
-        help=('If specified, use the client ID/secret from the named '
-              'file, which should be a client_secrets.json file '
-              'downloaded from the Developer Console.'))
-    shared_flags.add_argument(
-        '--credentials_filename',
-        default='',
-        help='(optional) Filename for fetching/storing credentials.')
-    shared_flags.add_argument(
-        '--service_account_json_keyfile',
-        default='',
-        help=('Filename for a JSON service account key downloaded from '
-              'the Google Developer Console.'))
-
-    parser = argparse.ArgumentParser(
-        description=__doc__,
-        formatter_class=argparse.RawDescriptionHelpFormatter,
-    )
-    subparsers = parser.add_subparsers(dest='command')
-
-    # email
-    email = subparsers.add_parser('email', help=_Email.__doc__,
-                                  parents=[shared_flags])
-    email.set_defaults(func=_Email)
-    email.add_argument(
-        'access_token',
-        help=('Access token to print associated email address for. Must have '
-              'the userinfo.email scope.'))
-
-    # fetch
-    fetch = subparsers.add_parser('fetch', help=_Fetch.__doc__,
-                                  parents=[shared_flags])
-    fetch.set_defaults(func=_Fetch)
-    fetch.add_argument(
-        '-f', '--credentials_format',
-        default='pretty', choices=sorted(_FORMATS),
-        help='Output format for token.')
-    fetch.add_argument(
-        'scope',
-        nargs='*',
-        help='Scope to fetch. May be provided multiple times.')
-
-    # header
-    header = subparsers.add_parser('header', help=_Header.__doc__,
-                                   parents=[shared_flags])
-    header.set_defaults(func=_Header)
-    header.add_argument(
-        'scope',
-        nargs='*',
-        help='Scope to header. May be provided multiple times.')
-
-    # scopes
-    scopes = subparsers.add_parser('scopes', help=_Scopes.__doc__,
-                                   parents=[shared_flags])
-    scopes.set_defaults(func=_Scopes)
-    scopes.add_argument(
-        'access_token',
-        help=('Scopes associated with this token will be printed.'))
-
-    # userinfo
-    userinfo = subparsers.add_parser('userinfo', help=_Userinfo.__doc__,
-                                     parents=[shared_flags])
-    userinfo.set_defaults(func=_Userinfo)
-    userinfo.add_argument(
-        '-f', '--format',
-        default='json', choices=('json', 'json_compact'),
-        help='Output format for userinfo.')
-    userinfo.add_argument(
-        'access_token',
-        help=('Access token to print associated email address for. Must have '
-              'the userinfo.email scope.'))
-
-    # validate
-    validate = subparsers.add_parser('validate', help=_Validate.__doc__,
-                                     parents=[shared_flags])
-    validate.set_defaults(func=_Validate)
-    validate.add_argument(
-        'access_token',
-        help='Access token to validate.')
-
-    return parser
-
-
-def main(argv=None):
-    argv = argv or sys.argv
-    # Invoke the newly created parser.
-    args = _GetParser().parse_args(argv[1:])
-    try:
-        exit_code = args.func(args)
-    except BaseException as e:
-        print('Error encountered in {0} operation: {1}'.format(
-            args.command, e))
-        return 1
-    return exit_code
-
-
-if __name__ == '__main__':
-    sys.exit(main(sys.argv))
diff --git a/apitools/scripts/oauth2l_test.py b/apitools/scripts/oauth2l_test.py
deleted file mode 100644
index 157eb3a..0000000
--- a/apitools/scripts/oauth2l_test.py
+++ /dev/null
@@ -1,348 +0,0 @@
-#
-# Copyright 2015 Google Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""Tests for oauth2l."""
-
-import json
-import os
-import sys
-
-import mock
-import oauth2client.client
-import six
-from six.moves import http_client
-import unittest2
-
-import apitools.base.py as apitools_base
-from apitools.scripts import oauth2l
-
-_OAUTH2L_MAIN_RUN = False
-
-
-class _FakeResponse(object):
-
-    def __init__(self, status_code, scopes=None):
-        self.status_code = status_code
-        if self.status_code == http_client.OK:
-            self.content = json.dumps({'scope': ' '.join(scopes or [])})
-        else:
-            self.content = 'Error'
-            self.info = str(http_client.responses[self.status_code])
-            self.request_url = 'some-url'
-
-
-def _GetCommandOutput(command_name, command_argv):
-    orig_stdout = sys.stdout
-    orig_stderr = sys.stderr
-    new_stdout = six.StringIO()
-    new_stderr = six.StringIO()
-    try:
-        sys.stdout = new_stdout
-        sys.stderr = new_stderr
-        oauth2l.main(['oauth2l', command_name] + command_argv)
-    finally:
-        sys.stdout = orig_stdout
-        sys.stderr = orig_stderr
-    new_stdout.seek(0)
-    return new_stdout.getvalue().rstrip()
-
-
-class InvalidCommandTest(unittest2.TestCase):
-
-    def testOutput(self):
-        self.assertRaises(SystemExit,
-                          _GetCommandOutput, 'foo', [])
-
-
-class Oauth2lFormattingTest(unittest2.TestCase):
-
-    def setUp(self):
-        # Set up an access token to use
-        self.access_token = 'ya29.abdefghijklmnopqrstuvwxyz'
-        self.user_agent = 'oauth2l/1.0'
-        self.credentials = oauth2client.client.AccessTokenCredentials(
-            self.access_token, self.user_agent)
-
-    def _Args(self, credentials_format):
-        return ['--credentials_format=' + credentials_format, 'userinfo.email']
-
-    def testFormatBare(self):
-        with mock.patch.object(oauth2l, '_FetchCredentials',
-                               return_value=self.credentials,
-                               autospec=True) as mock_credentials:
-            output = _GetCommandOutput('fetch', self._Args('bare'))
-            self.assertEqual(self.access_token, output)
-            self.assertEqual(1, mock_credentials.call_count)
-
-    def testFormatHeader(self):
-        with mock.patch.object(oauth2l, '_FetchCredentials',
-                               return_value=self.credentials,
-                               autospec=True) as mock_credentials:
-            output = _GetCommandOutput('fetch', self._Args('header'))
-            header = 'Authorization: Bearer %s' % self.access_token
-            self.assertEqual(header, output)
-            self.assertEqual(1, mock_credentials.call_count)
-
-    def testHeaderCommand(self):
-        with mock.patch.object(oauth2l, '_FetchCredentials',
-                               return_value=self.credentials,
-                               autospec=True) as mock_credentials:
-            output = _GetCommandOutput('header', ['userinfo.email'])
-            header = 'Authorization: Bearer %s' % self.access_token
-            self.assertEqual(header, output)
-            self.assertEqual(1, mock_credentials.call_count)
-
-    def testFormatJson(self):
-        with mock.patch.object(oauth2l, '_FetchCredentials',
-                               return_value=self.credentials,
-                               autospec=True) as mock_credentials:
-            output = _GetCommandOutput('fetch', self._Args('json'))
-            output_lines = [l.strip() for l in output.splitlines()]
-            expected_lines = [
-                '"_class": "AccessTokenCredentials",',
-                '"access_token": "%s",' % self.access_token,
-            ]
-            for line in expected_lines:
-                self.assertIn(line, output_lines)
-            self.assertEqual(1, mock_credentials.call_count)
-
-    def testFormatJsonCompact(self):
-        with mock.patch.object(oauth2l, '_FetchCredentials',
-                               return_value=self.credentials,
-                               autospec=True) as mock_credentials:
-            output = _GetCommandOutput('fetch', self._Args('json_compact'))
-            expected_clauses = [
-                '"_class":"AccessTokenCredentials",',
-                '"access_token":"%s",' % self.access_token,
-            ]
-            for clause in expected_clauses:
-                self.assertIn(clause, output)
-            self.assertEqual(1, len(output.splitlines()))
-            self.assertEqual(1, mock_credentials.call_count)
-
-    def testFormatPretty(self):
-        with mock.patch.object(oauth2l, '_FetchCredentials',
-                               return_value=self.credentials,
-                               autospec=True) as mock_credentials:
-            output = _GetCommandOutput('fetch', self._Args('pretty'))
-            expecteds = ['oauth2client.client.AccessTokenCredentials',
-                         self.access_token]
-            for expected in expecteds:
-                self.assertIn(expected, output)
-            self.assertEqual(1, mock_credentials.call_count)
-
-    def testFakeFormat(self):
-        self.assertRaises(ValueError,
-                          oauth2l._Format, 'xml', self.credentials)
-
-
-class TestFetch(unittest2.TestCase):
-
-    def setUp(self):
-        # Set up an access token to use
-        self.access_token = 'ya29.abdefghijklmnopqrstuvwxyz'
-        self.user_agent = 'oauth2l/1.0'
-        self.credentials = oauth2client.client.AccessTokenCredentials(
-            self.access_token, self.user_agent)
-
-    def testNoScopes(self):
-        output = _GetCommandOutput('fetch', [])
-        self.assertEqual(
-            'Error encountered in fetch operation: No scopes provided',
-            output)
-
-    def testScopes(self):
-        expected_scopes = [
-            'https://www.googleapis.com/auth/userinfo.email',
-            'https://www.googleapis.com/auth/cloud-platform',
-        ]
-        with mock.patch.object(apitools_base, 'GetCredentials',
-                               return_value=self.credentials,
-                               autospec=True) as mock_fetch:
-            with mock.patch.object(oauth2l, '_GetTokenScopes',
-                                   return_value=expected_scopes,
-                                   autospec=True) as mock_get_scopes:
-                output = _GetCommandOutput(
-                    'fetch', ['userinfo.email', 'cloud-platform'])
-                self.assertIn(self.access_token, output)
-                self.assertEqual(1, mock_fetch.call_count)
-                args, _ = mock_fetch.call_args
-                self.assertEqual(expected_scopes, args[-1])
-                self.assertEqual(1, mock_get_scopes.call_count)
-                self.assertEqual((self.access_token,),
-                                 mock_get_scopes.call_args[0])
-
-    def testCredentialsRefreshed(self):
-        with mock.patch.object(apitools_base, 'GetCredentials',
-                               return_value=self.credentials,
-                               autospec=True) as mock_fetch:
-            with mock.patch.object(oauth2l, '_ValidateToken',
-                                   return_value=False,
-                                   autospec=True) as mock_validate:
-                with mock.patch.object(self.credentials, 'refresh',
-                                       return_value=None,
-                                       autospec=True) as mock_refresh:
-                    output = _GetCommandOutput('fetch', ['userinfo.email'])
-                    self.assertIn(self.access_token, output)
-                    self.assertEqual(1, mock_fetch.call_count)
-                    self.assertEqual(1, mock_validate.call_count)
-                    self.assertEqual(1, mock_refresh.call_count)
-
-    def testDefaultClientInfo(self):
-        with mock.patch.object(apitools_base, 'GetCredentials',
-                               return_value=self.credentials,
-                               autospec=True) as mock_fetch:
-            with mock.patch.object(oauth2l, '_ValidateToken',
-                                   return_value=True,
-                                   autospec=True) as mock_validate:
-                output = _GetCommandOutput('fetch', ['userinfo.email'])
-                self.assertIn(self.access_token, output)
-                self.assertEqual(1, mock_fetch.call_count)
-                _, kwargs = mock_fetch.call_args
-                self.assertEqual(
-                    '1042881264118.apps.googleusercontent.com',
-                    kwargs['client_id'])
-                self.assertEqual(1, mock_validate.call_count)
-
-    def testMissingClientSecrets(self):
-        self.assertRaises(
-            ValueError,
-            oauth2l.GetClientInfoFromFlags, '/non/existent/file')
-
-    def testWrongClientSecretsFormat(self):
-        client_secrets = os.path.join(
-            os.path.dirname(__file__),
-            'testdata/noninstalled_client_secrets.json')
-        self.assertRaises(
-            ValueError,
-            oauth2l.GetClientInfoFromFlags, client_secrets)
-
-    def testCustomClientInfo(self):
-        client_secrets_path = os.path.join(
-            os.path.dirname(__file__), 'testdata/fake_client_secrets.json')
-        with mock.patch.object(apitools_base, 'GetCredentials',
-                               return_value=self.credentials,
-                               autospec=True) as mock_fetch:
-            with mock.patch.object(oauth2l, '_ValidateToken',
-                                   return_value=True,
-                                   autospec=True) as mock_validate:
-                fetch_args = [
-                    '--client_secrets=' + client_secrets_path,
-                    'userinfo.email']
-                output = _GetCommandOutput('fetch', fetch_args)
-                self.assertIn(self.access_token, output)
-                self.assertEqual(1, mock_fetch.call_count)
-                _, kwargs = mock_fetch.call_args
-                self.assertEqual('144169.apps.googleusercontent.com',
-                                 kwargs['client_id'])
-                self.assertEqual('awesomesecret',
-                                 kwargs['client_secret'])
-                self.assertEqual(1, mock_validate.call_count)
-
-
-class TestOtherCommands(unittest2.TestCase):
-
-    def setUp(self):
-        # Set up an access token to use
-        self.access_token = 'ya29.abdefghijklmnopqrstuvwxyz'
-        self.user_agent = 'oauth2l/1.0'
-        self.credentials = oauth2client.client.AccessTokenCredentials(
-            self.access_token, self.user_agent)
-
-    def testEmail(self):
-        user_info = {'email': 'foo@example.com'}
-        with mock.patch.object(apitools_base, 'GetUserinfo',
-                               return_value=user_info,
-                               autospec=True) as mock_get_userinfo:
-            output = _GetCommandOutput('email', [self.access_token])
-            self.assertEqual(user_info['email'], output)
-            self.assertEqual(1, mock_get_userinfo.call_count)
-            self.assertEqual(self.access_token,
-                             mock_get_userinfo.call_args[0][0].access_token)
-
-    def testNoEmail(self):
-        with mock.patch.object(apitools_base, 'GetUserinfo',
-                               return_value={},
-                               autospec=True) as mock_get_userinfo:
-            output = _GetCommandOutput('email', [self.access_token])
-            self.assertEqual('', output)
-            self.assertEqual(1, mock_get_userinfo.call_count)
-
-    def testUserinfo(self):
-        user_info = {'email': 'foo@example.com'}
-        with mock.patch.object(apitools_base, 'GetUserinfo',
-                               return_value=user_info,
-                               autospec=True) as mock_get_userinfo:
-            output = _GetCommandOutput('userinfo', [self.access_token])
-            self.assertEqual(json.dumps(user_info, indent=4), output)
-            self.assertEqual(1, mock_get_userinfo.call_count)
-            self.assertEqual(self.access_token,
-                             mock_get_userinfo.call_args[0][0].access_token)
-
-    def testUserinfoCompact(self):
-        user_info = {'email': 'foo@example.com'}
-        with mock.patch.object(apitools_base, 'GetUserinfo',
-                               return_value=user_info,
-                               autospec=True) as mock_get_userinfo:
-            output = _GetCommandOutput(
-                'userinfo', ['--format=json_compact', self.access_token])
-            self.assertEqual(json.dumps(user_info, separators=(',', ':')),
-                             output)
-            self.assertEqual(1, mock_get_userinfo.call_count)
-            self.assertEqual(self.access_token,
-                             mock_get_userinfo.call_args[0][0].access_token)
-
-    def testScopes(self):
-        scopes = [u'https://www.googleapis.com/auth/userinfo.email',
-                  u'https://www.googleapis.com/auth/cloud-platform']
-        response = _FakeResponse(http_client.OK, scopes=scopes)
-        with mock.patch.object(apitools_base, 'MakeRequest',
-                               return_value=response,
-                               autospec=True) as mock_make_request:
-            output = _GetCommandOutput('scopes', [self.access_token])
-            self.assertEqual(sorted(scopes), output.splitlines())
-            self.assertEqual(1, mock_make_request.call_count)
-
-    def testValidate(self):
-        scopes = [u'https://www.googleapis.com/auth/userinfo.email',
-                  u'https://www.googleapis.com/auth/cloud-platform']
-        response = _FakeResponse(http_client.OK, scopes=scopes)
-        with mock.patch.object(apitools_base, 'MakeRequest',
-                               return_value=response,
-                               autospec=True) as mock_make_request:
-            output = _GetCommandOutput('validate', [self.access_token])
-            self.assertEqual('', output)
-            self.assertEqual(1, mock_make_request.call_count)
-
-    def testBadResponseCode(self):
-        response = _FakeResponse(http_client.BAD_REQUEST)
-        with mock.patch.object(apitools_base, 'MakeRequest',
-                               return_value=response,
-                               autospec=True) as mock_make_request:
-            output = _GetCommandOutput('scopes', [self.access_token])
-            self.assertEqual('', output)
-            self.assertEqual(1, mock_make_request.call_count)
-
-    def testUnexpectedResponseCode(self):
-        response = _FakeResponse(http_client.INTERNAL_SERVER_ERROR)
-        with mock.patch.object(apitools_base, 'MakeRequest',
-                               return_value=response,
-                               autospec=True) as mock_make_request:
-            output = _GetCommandOutput('scopes', [self.access_token])
-            self.assertIn(str(http_client.responses[response.status_code]),
-                          output)
-            self.assertIn('Error encountered in scopes operation: HttpError',
-                          output)
-            self.assertEqual(1, mock_make_request.call_count)
diff --git a/apitools/scripts/testdata/fake_client_secrets.json b/apitools/scripts/testdata/fake_client_secrets.json
index f1fabe6..b5b5090 100644
--- a/apitools/scripts/testdata/fake_client_secrets.json
+++ b/apitools/scripts/testdata/fake_client_secrets.json
@@ -10,6 +10,6 @@
       "urn:ietf:wg:oauth:2.0:oob",
       "oob"
     ],
-    "token_uri": "https://accounts.google.com/o/oauth2/token"
+    "token_uri": "https://oauth2.googleapis.com/token"
   }
 }
diff --git a/default.pylintrc b/default.pylintrc
deleted file mode 100644
index 7b9c3c4..0000000
--- a/default.pylintrc
+++ /dev/null
@@ -1,352 +0,0 @@
-# PyLint config for apitools code.
-#
-# NOTES:
-#
-# - Rules for test / demo code are generated into 'pylintrc_reduced'
-#   as deltas from this configuration by the 'run_pylint.py' script.
-#
-# - 'RATIONALE:  API mapping' as a defense for non-default settings is
-#   based on the fact that this library maps APIs which are outside our
-#   control, and adhering to the out-of-the-box defaults would induce
-#   breakage / complexity in those mappings
-#
-[MASTER]
-
-# Specify a configuration file.
-# DEFAULT:  rcfile=
-
-# Python code to execute, usually for sys.path manipulation such as
-# pygtk.require().
-# DEFAULT: init-hook=
-
-# Profiled execution.
-# DEFAULT:  profile=no
-
-# Add files or directories to the blacklist. They should be base names, not
-# paths.
-# DEFAULT:  ignore=CVS
-# NOTE: This path must be relative due to the use of
-#       os.walk in astroid.modutils.get_module_files.
-
-# Pickle collected data for later comparisons.
-# DEFAULT:  persistent=yes
-
-# List of plugins (as comma separated values of python modules names) to load,
-# usually to register additional checkers.
-# DEFAULT:  load-plugins=
-
-# DEPRECATED
-# DEFAULT:  include-ids=no
-
-# DEPRECATED
-# DEFAULT:  symbols=no
-
-
-[MESSAGES CONTROL]
-
-# TODO: remove cyclic-import.
-disable =
-    cyclic-import,
-    fixme,
-    import-error,
-    locally-disabled,
-    locally-enabled,
-    no-member,
-    no-name-in-module,
-    no-self-use,
-    super-on-old-class,
-    too-many-arguments,
-    too-many-function-args,
-
-
-[REPORTS]
-
-# Set the output format. Available formats are text, parseable, colorized, msvs
-# (visual studio) and html. You can also give a reporter class, eg
-# mypackage.mymodule.MyReporterClass.
-# DEFAULT:  output-format=text
-
-# Put messages in a separate file for each module / package specified on the
-# command line instead of printing them on stdout. Reports (if any) will be
-# written in a file name "pylint_global.[txt|html]".
-# DEFAULT:  files-output=no
-
-# Tells whether to display a full report or only the messages
-# DEFAULT:  reports=yes
-# RATIONALE:  run from Travis / tox, and don't need / want to parse output.
-reports=no
-
-# Python expression which should return a note less than 10 (10 is the highest
-# note). You have access to the variables errors warning, statement which
-# respectively contain the number of errors / warnings messages and the total
-# number of statements analyzed. This is used by the global evaluation report
-# (RP0004).
-# DEFAULT:  evaluation=10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10)
-
-# Add a comment according to your evaluation note. This is used by the global
-# evaluation report (RP0004).
-# DEFAULT:  comment=no
-
-# Template used to display messages. This is a python new-style format string
-# used to format the message information. See doc for all details
-#msg-template=
-
-
-[SIMILARITIES]
-
-# Minimum lines number of a similarity.
-# DEFAULT:  min-similarity-lines=4
-min-similarity-lines=15
-
-# Ignore comments when computing similarities.
-# DEFAULT:  ignore-comments=yes
-
-# Ignore docstrings when computing similarities.
-# DEFAULT:  ignore-docstrings=yes
-
-# Ignore imports when computing similarities.
-# DEFAULT:  ignore-imports=no
-ignore-imports=yes
-
-
-[VARIABLES]
-
-# Tells whether we should check for unused import in __init__ files.
-# DEFAULT:  init-import=no
-
-# A regular expression matching the name of dummy variables (i.e. expectedly
-# not used).
-dummy-variables-rgx=^\*{0,2}(_$|unused_|dummy_)
-
-
-# List of additional names supposed to be defined in builtins. Remember that
-# you should avoid to define new builtins when possible.
-# DEFAULT:  additional-builtins=
-
-
-[LOGGING]
-
-# Logging modules to check that the string format arguments are in logging
-# function parameter format
-# DEFAULT:  logging-modules=logging
-
-
-[FORMAT]
-
-# Maximum number of characters on a single line.
-# DEFAULT:  max-line-length=80
-
-# Regexp for a line that is allowed to be longer than the limit.
-# DEFAULT:  ignore-long-lines=^\s*(# )?<?https?://\S+>?$
-
-# Allow the body of an if to be on the same line as the test if there is no
-# else.
-# DEFAULT:  single-line-if-stmt=no
-
-# List of optional constructs for which whitespace checking is disabled
-# DEFAULT:  no-space-check=trailing-comma,dict-separator
-# RATIONALE: pylint ignores whitespace checks around the
-#            constructs "dict-separator" (cases like {1:2}) and
-#            "trailing-comma" (cases like {1: 2, }).
-#            By setting "no-space-check" to empty whitespace checks will be
-#            enforced around both constructs.
-no-space-check =
-
-# Maximum number of lines in a module
-# DEFAULT:  max-module-lines=1000
-max-module-lines=1500
-
-# String used as indentation unit. This is usually " " (4 spaces) or "\t" (1
-# tab).
-# DEFAULT:  indent-string='    '
-
-# Number of spaces of indent required inside a hanging or continued line.
-# DEFAULT:  indent-after-paren=4
-
-
-[MISCELLANEOUS]
-
-# List of note tags to take in consideration, separated by a comma.
-# DEFAULT:  notes=FIXME,XXX,TODO
-
-
-[BASIC]
-
-# Regular expression which should only match function or class names that do
-# not require a docstring.
-# DEFAULT:  no-docstring-rgx=__.*__
-no-docstring-rgx=(__.*__|main)
-
-# Minimum line length for functions/classes that require docstrings, shorter
-# ones are exempt.
-# DEFAULT:  docstring-min-length=-1
-docstring-min-length=10
-
-# Regular expression which should only match correct module names. The
-# leading underscore is sanctioned for private modules by Google's style
-# guide.
-module-rgx=^(_?[a-z][a-z0-9_]*)|__init__$
-
-# Regular expression matching correct constant names
-# DEFAULT:  const-rgx=(([A-Z_][A-Z0-9_]*)|(__.*__))$
-const-rgx=^(_?[A-Z][A-Z0-9_]*|__[a-z0-9_]+__|_?[a-z][a-z0-9_]*)$
-
-# Regular expression matching correct class attribute names
-# DEFAULT:  class-attribute-rgx=([A-Za-z_][A-Za-z0-9_]{2,30}|(__.*__))$
-class-attribute-rgx=^(_?[A-Z][A-Z0-9_]*|__[a-z0-9_]+__|_?[a-z][a-z0-9_]*)$
-
-# Regular expression matching correct class names
-# DEFAULT:  class-rgx=[A-Z_][a-zA-Z0-9]+$
-class-rgx=^_?[A-Z][a-zA-Z0-9]*$
-
-# Regular expression which should only match correct function names.
-# 'camel_case' and 'snake_case' group names are used for consistency of naming
-# styles across functions and methods.
-function-rgx=^(?:(?P<camel_case>_?[A-Z][a-zA-Z0-9]*)|(?P<snake_case>_?[a-z][a-z0-9_]*))$
-
-# Regular expression which should only match correct method names.
-# 'camel_case' and 'snake_case' group names are used for consistency of naming
-# styles across functions and methods. 'exempt' indicates a name which is
-# consistent with all naming styles.
-method-rgx=^(?:(?P<exempt>__[a-z0-9_]+__|next)|(?P<camel_case>_{0,2}[A-Z][a-zA-Z0-9]*)|(?P<snake_case>_{0,2}[a-z][a-z0-9_]*))$
-
-# Regular expression matching correct attribute names
-# DEFAULT:  attr-rgx=[a-z_][a-z0-9_]{2,30}$
-attr-rgx=^_{0,2}[a-z][a-z0-9_]*$
-
-# Regular expression matching correct argument names
-# DEFAULT:  argument-rgx=[a-z_][a-z0-9_]{2,30}$
-argument-rgx=^[a-z][a-z0-9_]*$
-
-# Regular expression matching correct variable names
-# DEFAULT:  variable-rgx=[a-z_][a-z0-9_]{2,30}$
-variable-rgx=^[a-z][a-z0-9_]*$
-
-# Regular expression matching correct inline iteration names
-# DEFAULT:  inlinevar-rgx=[A-Za-z_][A-Za-z0-9_]*$
-inlinevar-rgx=^[a-z][a-z0-9_]*$
-
-# Good variable names which should always be accepted, separated by a comma
-# DEFAULT:  good-names=i,j,k,ex,Run,_
-good-names=main,_
-
-# Bad variable names which should always be refused, separated by a comma
-# DEFAULT:  bad-names=foo,bar,baz,toto,tutu,tata
-bad-names=
-
-# List of builtins function names that should not be used, separated by a comma
-# <http://go/python-style#Deprecated_Language_Features>
-bad-functions=input,apply,reduce
-
-
-[TYPECHECK]
-
-# Tells whether missing members accessed in mixin class should be ignored. A
-# mixin class is detected if its name ends with "mixin" (case insensitive).
-# DEFAULT:  ignore-mixin-members=yes
-
-# List of module names for which member attributes should not be checked
-# (useful for modules/projects where namespaces are manipulated during runtime
-# and thus existing member attributes cannot be deduced by static analysis
-# DEFAULT:  ignored-modules=
-
-# List of classes names for which member attributes should not be checked
-# (useful for classes with attributes dynamically set).
-# DEFAULT:  ignored-classes=SQLObject
-
-# When zope mode is activated, add a predefined set of Zope acquired attributes
-# to generated-members.
-# DEFAULT:  zope=no
-
-# List of members which are set dynamically and missed by pylint inference
-# system, and so shouldn't trigger E0201 when accessed. Python regular
-# expressions are accepted.
-# DEFAULT:  generated-members=REQUEST,acl_users,aq_parent
-
-
-[IMPORTS]
-
-# Deprecated modules which should not be used, separated by a comma
-# DEFAULT:  deprecated-modules=regsub,TERMIOS,Bastion,rexec
-
-# Create a graph of every (i.e. internal and external) dependencies in the
-# given file (report RP0402 must not be disabled)
-# DEFAULT:  import-graph=
-
-# Create a graph of external dependencies in the given file (report RP0402 must
-# not be disabled)
-# DEFAULT:  ext-import-graph=
-
-# Create a graph of internal dependencies in the given file (report RP0402 must
-# not be disabled)
-# DEFAULT:  int-import-graph=
-
-
-[CLASSES]
-
-# List of interface methods to ignore, separated by a comma. This is used for
-# instance to not check methods defines in Zope's Interface base class.
-# DEFAULT:  ignore-iface-methods=isImplementedBy,deferred,extends,names,namesAndDescriptions,queryDescriptionFor,getBases,getDescriptionFor,getDoc,getName,getTaggedValue,getTaggedValueTags,isEqualOrExtendedBy,setTaggedValue,isImplementedByInstancesOf,adaptWith,is_implemented_by
-
-# List of method names used to declare (i.e. assign) instance attributes.
-# DEFAULT:  defining-attr-methods=__init__,__new__,setUp
-
-# List of valid names for the first argument in a class method.
-# DEFAULT:  valid-classmethod-first-arg=cls
-
-# List of valid names for the first argument in a metaclass class method.
-# DEFAULT:  valid-metaclass-classmethod-first-arg=mcs
-
-
-[DESIGN]
-
-# Maximum number of arguments for function / method
-# DEFAULT:  max-args=5
-# RATIONALE:  API-mapping
-max-args = 14
-
-# Argument names that match this expression will be ignored. Default to name
-# with leading underscore
-# DEFAULT:  ignored-argument-names=_.*
-
-# Maximum number of locals for function / method body
-# DEFAULT:  max-locals=15
-max-locals=24
-
-# Maximum number of return / yield for function / method body
-# DEFAULT:  max-returns=6
-max-returns=9
-
-# Maximum number of branch for function / method body
-# DEFAULT:  max-branches=12
-max-branches=21
-
-# Maximum number of statements in function / method body
-# DEFAULT:  max-statements=50
-
-# Maximum number of parents for a class (see R0901).
-# DEFAULT:  max-parents=7
-
-# Maximum number of attributes for a class (see R0902).
-# DEFAULT:  max-attributes=7
-# RATIONALE:  API mapping
-max-attributes=19
-
-# Minimum number of public methods for a class (see R0903).
-# DEFAULT:  min-public-methods=2
-# RATIONALE:  context mgrs may have *no* public methods
-min-public-methods=0
-
-# Maximum number of public methods for a class (see R0904).
-# DEFAULT:  max-public-methods=20
-# RATIONALE:  API mapping
-max-public-methods=40
-
-[ELIF]
-max-nested-blocks=6
-
-[EXCEPTIONS]
-
-# Exceptions that will emit a warning when being caught. Defaults to
-# "Exception"
-# DEFAULT:  overgeneral-exceptions=Exception
diff --git a/ez_setup.py b/ez_setup.py
index be314e4..18bd16c 100755
--- a/ez_setup.py
+++ b/ez_setup.py
@@ -91,7 +91,7 @@
         return do_download()       
     try:
         pkg_resources.require("setuptools>="+version); return
-    except pkg_resources.VersionConflict, e:
+    except pkg_resources.VersionConflict as e:
         if was_imported:
             print >>sys.stderr, (
             "The required version of setuptools (>=%s) is not available, and\n"
diff --git a/run_pylint.py b/run_pylint.py
deleted file mode 100644
index c53943f..0000000
--- a/run_pylint.py
+++ /dev/null
@@ -1,235 +0,0 @@
-#
-# Copyright 2015 Google Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""Custom script to run PyLint on apitools codebase.
-
-"Inspired" by the similar script in gcloud-python.
-
-This runs pylint as a script via subprocess in two different
-subprocesses. The first lints the production/library code
-using the default rc file (PRODUCTION_RC). The second lints the
-demo/test code using an rc file (TEST_RC) which allows more style
-violations (hence it has a reduced number of style checks).
-"""
-
-import ConfigParser
-import copy
-import os
-import subprocess
-import sys
-
-
-IGNORED_DIRECTORIES = [
-    'apitools/gen/testdata',
-    'samples/bigquery_sample/bigquery_v2',
-    'samples/dns_sample/dns_v1',
-    'samples/fusiontables_sample/fusiontables_v1',
-    'samples/iam_sample/iam_v1',
-    'samples/servicemanagement_sample/servicemanagement_v1',
-    'samples/storage_sample/storage_v1',
-    'venv',
-]
-IGNORED_FILES = [
-    'ez_setup.py',
-    'run_pylint.py',
-    'setup.py',
-]
-PRODUCTION_RC = 'default.pylintrc'
-TEST_RC = 'reduced.pylintrc'
-TEST_DISABLED_MESSAGES = [
-    'exec-used',
-    'invalid-name',
-    'missing-docstring',
-    'protected-access',
-]
-TEST_RC_ADDITIONS = {
-    'MESSAGES CONTROL': {
-        'disable': ',\n'.join(TEST_DISABLED_MESSAGES),
-    },
-}
-
-
-def read_config(filename):
-    """Reads pylintrc config onto native ConfigParser object."""
-    config = ConfigParser.ConfigParser()
-    with open(filename, 'r') as file_obj:
-        config.readfp(file_obj)
-    return config
-
-
-def make_test_rc(base_rc_filename, additions_dict, target_filename):
-    """Combines a base rc and test additions into single file."""
-    main_cfg = read_config(base_rc_filename)
-
-    # Create fresh config for test, which must extend production.
-    test_cfg = ConfigParser.ConfigParser()
-    test_cfg._sections = copy.deepcopy(main_cfg._sections)
-
-    for section, opts in additions_dict.items():
-        curr_section = test_cfg._sections.setdefault(
-            section, test_cfg._dict())
-        for opt, opt_val in opts.items():
-            curr_val = curr_section.get(opt)
-            if curr_val is None:
-                raise KeyError('Expected to be adding to existing option.')
-            curr_section[opt] = '%s\n%s' % (curr_val, opt_val)
-
-    with open(target_filename, 'w') as file_obj:
-        test_cfg.write(file_obj)
-
-
-def valid_filename(filename):
-    """Checks if a file is a Python file and is not ignored."""
-    for directory in IGNORED_DIRECTORIES:
-        if filename.startswith(directory):
-            return False
-    return (filename.endswith('.py') and
-            filename not in IGNORED_FILES)
-
-
-def is_production_filename(filename):
-    """Checks if the file contains production code.
-
-    :rtype: boolean
-    :returns: Boolean indicating production status.
-    """
-    return not ('demo' in filename or 'test' in filename or
-                filename.startswith('regression'))
-
-
-def get_files_for_linting(allow_limited=True, diff_base=None):
-    """Gets a list of files in the repository.
-
-    By default, returns all files via ``git ls-files``. However, in some cases
-    uses a specific commit or branch (a so-called diff base) to compare
-    against for changed files. (This requires ``allow_limited=True``.)
-
-    To speed up linting on Travis pull requests against master, we manually
-    set the diff base to origin/master. We don't do this on non-pull requests
-    since origin/master will be equivalent to the currently checked out code.
-    One could potentially use ${TRAVIS_COMMIT_RANGE} to find a diff base but
-    this value is not dependable.
-
-    :type allow_limited: boolean
-    :param allow_limited: Boolean indicating if a reduced set of files can
-                          be used.
-
-    :rtype: pair
-    :returns: Tuple of the diff base using the the list of filenames to be
-              linted.
-    """
-    if os.getenv('TRAVIS') == 'true':
-        # In travis, don't default to master.
-        diff_base = None
-
-    if (os.getenv('TRAVIS_BRANCH') == 'master' and
-            os.getenv('TRAVIS_PULL_REQUEST') != 'false'):
-        # In the case of a pull request into master, we want to
-        # diff against HEAD in master.
-        diff_base = 'origin/master'
-
-    if diff_base is not None and allow_limited:
-        result = subprocess.check_output(['git', 'diff', '--name-only',
-                                          diff_base])
-        print 'Using files changed relative to %s:' % (diff_base,)
-        print '-' * 60
-        print result.rstrip('\n')  # Don't print trailing newlines.
-        print '-' * 60
-    else:
-        print 'Diff base not specified, listing all files in repository.'
-        result = subprocess.check_output(['git', 'ls-files'])
-
-    return result.rstrip('\n').split('\n'), diff_base
-
-
-def get_python_files(all_files=None, diff_base=None):
-    """Gets a list of all Python files in the repository that need linting.
-
-    Relies on :func:`get_files_for_linting()` to determine which files should
-    be considered.
-
-    NOTE: This requires ``git`` to be installed and requires that this
-          is run within the ``git`` repository.
-
-    :type all_files: list or ``NoneType``
-    :param all_files: Optional list of files to be linted.
-
-    :rtype: tuple
-    :returns: A tuple containing two lists and a boolean. The first list
-              contains all production files, the next all test/demo files and
-              the boolean indicates if a restricted fileset was used.
-    """
-    using_restricted = False
-    if all_files is None:
-        all_files, diff_base = get_files_for_linting(diff_base=diff_base)
-        using_restricted = diff_base is not None
-
-    library_files = []
-    non_library_files = []
-    for filename in all_files:
-        if valid_filename(filename):
-            if is_production_filename(filename):
-                library_files.append(filename)
-            else:
-                non_library_files.append(filename)
-
-    return library_files, non_library_files, using_restricted
-
-
-def lint_fileset(filenames, rcfile, description):
-    """Lints a group of files using a given rcfile."""
-    # Only lint filenames that exist. For example, 'git diff --name-only'
-    # could spit out deleted / renamed files. Another alternative could
-    # be to use 'git diff --name-status' and filter out files with a
-    # status of 'D'.
-    filenames = [filename for filename in filenames
-                 if os.path.exists(filename)]
-    if filenames:
-        rc_flag = '--rcfile=%s' % (rcfile,)
-        pylint_shell_command = ['pylint', rc_flag] + filenames
-        status_code = subprocess.call(pylint_shell_command)
-        if status_code != 0:
-            error_message = ('Pylint failed on %s with '
-                             'status %d.' % (description, status_code))
-            print >> sys.stderr, error_message
-            sys.exit(status_code)
-    else:
-        print 'Skipping %s, no files to lint.' % (description,)
-
-
-def main(argv):
-    """Script entry point. Lints both sets of files."""
-    diff_base = argv[1] if len(argv) > 1 else None
-    make_test_rc(PRODUCTION_RC, TEST_RC_ADDITIONS, TEST_RC)
-    library_files, non_library_files, using_restricted = get_python_files(
-        diff_base=diff_base)
-    try:
-        lint_fileset(library_files, PRODUCTION_RC, 'library code')
-        lint_fileset(non_library_files, TEST_RC, 'test and demo code')
-    except SystemExit:
-        if not using_restricted:
-            raise
-
-        message = 'Restricted lint failed, expanding to full fileset.'
-        print >> sys.stderr, message
-        all_files, _ = get_files_for_linting(allow_limited=False)
-        library_files, non_library_files, _ = get_python_files(
-            all_files=all_files)
-        lint_fileset(library_files, PRODUCTION_RC, 'library code')
-        lint_fileset(non_library_files, TEST_RC, 'test and demo code')
-
-
-if __name__ == '__main__':
-    main(sys.argv)
diff --git a/samples/bigquery_sample/bigquery_v2/bigquery_v2.py b/samples/bigquery_sample/bigquery_v2/bigquery_v2.py
deleted file mode 100644
index 7cd69b5..0000000
--- a/samples/bigquery_sample/bigquery_v2/bigquery_v2.py
+++ /dev/null
@@ -1,1096 +0,0 @@
-#!/usr/bin/env python
-"""CLI for bigquery, version v2."""
-# NOTE: This file is autogenerated and should not be edited by hand.
-
-import code
-import os
-import platform
-import sys
-
-from apitools.base.protorpclite import message_types
-from apitools.base.protorpclite import messages
-
-from google.apputils import appcommands
-import gflags as flags
-
-import apitools.base.py as apitools_base
-from apitools.base.py import cli as apitools_base_cli
-import bigquery_v2_client as client_lib
-import bigquery_v2_messages as messages
-
-
-def _DeclareBigqueryFlags():
-  """Declare global flags in an idempotent way."""
-  if 'api_endpoint' in flags.FLAGS:
-    return
-  flags.DEFINE_string(
-      'api_endpoint',
-      u'https://www.googleapis.com/bigquery/v2/',
-      'URL of the API endpoint to use.',
-      short_name='bigquery_url')
-  flags.DEFINE_string(
-      'history_file',
-      u'~/.bigquery.v2.history',
-      'File with interactive shell history.')
-  flags.DEFINE_multistring(
-      'add_header', [],
-      'Additional http headers (as key=value strings). '
-      'Can be specified multiple times.')
-  flags.DEFINE_string(
-      'service_account_json_keyfile', '',
-      'Filename for a JSON service account key downloaded'
-      ' from the Developer Console.')
-  flags.DEFINE_enum(
-      'alt',
-      u'json',
-      [u'json'],
-      u'Data format for the response.')
-  flags.DEFINE_string(
-      'fields',
-      None,
-      u'Selector specifying which fields to include in a partial response.')
-  flags.DEFINE_string(
-      'key',
-      None,
-      u'API key. Your API key identifies your project and provides you with '
-      u'API access, quota, and reports. Required unless you provide an OAuth '
-      u'2.0 token.')
-  flags.DEFINE_string(
-      'oauth_token',
-      None,
-      u'OAuth 2.0 token for the current user.')
-  flags.DEFINE_boolean(
-      'prettyPrint',
-      'True',
-      u'Returns response with indentations and line breaks.')
-  flags.DEFINE_string(
-      'quotaUser',
-      None,
-      u'Available to use for quota purposes for server-side applications. Can'
-      u' be any arbitrary string assigned to a user, but should not exceed 40'
-      u' characters. Overrides userIp if both are provided.')
-  flags.DEFINE_string(
-      'trace',
-      None,
-      'A tracing token of the form "token:<tokenid>" to include in api '
-      'requests.')
-  flags.DEFINE_string(
-      'userIp',
-      None,
-      u'IP address of the site where the request originates. Use this if you '
-      u'want to enforce per-user limits.')
-
-
-FLAGS = flags.FLAGS
-apitools_base_cli.DeclareBaseFlags()
-_DeclareBigqueryFlags()
-
-
-def GetGlobalParamsFromFlags():
-  """Return a StandardQueryParameters based on flags."""
-  result = messages.StandardQueryParameters()
-  if FLAGS['alt'].present:
-    result.alt = messages.StandardQueryParameters.AltValueValuesEnum(FLAGS.alt)
-  if FLAGS['fields'].present:
-    result.fields = FLAGS.fields.decode('utf8')
-  if FLAGS['key'].present:
-    result.key = FLAGS.key.decode('utf8')
-  if FLAGS['oauth_token'].present:
-    result.oauth_token = FLAGS.oauth_token.decode('utf8')
-  if FLAGS['prettyPrint'].present:
-    result.prettyPrint = FLAGS.prettyPrint
-  if FLAGS['quotaUser'].present:
-    result.quotaUser = FLAGS.quotaUser.decode('utf8')
-  if FLAGS['trace'].present:
-    result.trace = FLAGS.trace.decode('utf8')
-  if FLAGS['userIp'].present:
-    result.userIp = FLAGS.userIp.decode('utf8')
-  return result
-
-
-def GetClientFromFlags():
-  """Return a client object, configured from flags."""
-  log_request = FLAGS.log_request or FLAGS.log_request_response
-  log_response = FLAGS.log_response or FLAGS.log_request_response
-  api_endpoint = apitools_base.NormalizeApiEndpoint(FLAGS.api_endpoint)
-  additional_http_headers = dict(x.split('=', 1) for x in FLAGS.add_header)
-  credentials_args = {
-      'service_account_json_keyfile': os.path.expanduser(FLAGS.service_account_json_keyfile)
-  }
-  try:
-    client = client_lib.BigqueryV2(
-        api_endpoint, log_request=log_request,
-        log_response=log_response,
-        credentials_args=credentials_args,
-        additional_http_headers=additional_http_headers)
-  except apitools_base.CredentialsError as e:
-    print 'Error creating credentials: %s' % e
-    sys.exit(1)
-  return client
-
-
-class PyShell(appcommands.Cmd):
-
-  def Run(self, _):
-    """Run an interactive python shell with the client."""
-    client = GetClientFromFlags()
-    params = GetGlobalParamsFromFlags()
-    for field in params.all_fields():
-      value = params.get_assigned_value(field.name)
-      if value != field.default:
-        client.AddGlobalParam(field.name, value)
-    banner = """
-           == bigquery interactive console ==
-                 client: a bigquery client
-          apitools_base: base apitools module
-         messages: the generated messages module
-    """
-    local_vars = {
-        'apitools_base': apitools_base,
-        'client': client,
-        'client_lib': client_lib,
-        'messages': messages,
-    }
-    if platform.system() == 'Linux':
-      console = apitools_base_cli.ConsoleWithReadline(
-          local_vars, histfile=FLAGS.history_file)
-    else:
-      console = code.InteractiveConsole(local_vars)
-    try:
-      console.interact(banner)
-    except SystemExit as e:
-      return e.code
-
-
-class DatasetsDelete(apitools_base_cli.NewCmd):
-  """Command wrapping datasets.Delete."""
-
-  usage = """datasets_delete <projectId> <datasetId>"""
-
-  def __init__(self, name, fv):
-    super(DatasetsDelete, self).__init__(name, fv)
-    flags.DEFINE_boolean(
-        'deleteContents',
-        None,
-        u'If True, delete all the tables in the dataset. If False and the '
-        u'dataset contains tables, the request will fail. Default is False',
-        flag_values=fv)
-
-  def RunWithArgs(self, projectId, datasetId):
-    """Deletes the dataset specified by the datasetId value. Before you can
-    delete a dataset, you must delete all its tables, either manually or by
-    specifying deleteContents. Immediately after deletion, you can create
-    another dataset with the same name.
-
-    Args:
-      projectId: Project ID of the dataset being deleted
-      datasetId: Dataset ID of dataset being deleted
-
-    Flags:
-      deleteContents: If True, delete all the tables in the dataset. If False
-        and the dataset contains tables, the request will fail. Default is
-        False
-    """
-    client = GetClientFromFlags()
-    global_params = GetGlobalParamsFromFlags()
-    request = messages.BigqueryDatasetsDeleteRequest(
-        projectId=projectId.decode('utf8'),
-        datasetId=datasetId.decode('utf8'),
-        )
-    if FLAGS['deleteContents'].present:
-      request.deleteContents = FLAGS.deleteContents
-    result = client.datasets.Delete(
-        request, global_params=global_params)
-    print apitools_base_cli.FormatOutput(result)
-
-
-class DatasetsGet(apitools_base_cli.NewCmd):
-  """Command wrapping datasets.Get."""
-
-  usage = """datasets_get <projectId> <datasetId>"""
-
-  def __init__(self, name, fv):
-    super(DatasetsGet, self).__init__(name, fv)
-
-  def RunWithArgs(self, projectId, datasetId):
-    """Returns the dataset specified by datasetID.
-
-    Args:
-      projectId: Project ID of the requested dataset
-      datasetId: Dataset ID of the requested dataset
-    """
-    client = GetClientFromFlags()
-    global_params = GetGlobalParamsFromFlags()
-    request = messages.BigqueryDatasetsGetRequest(
-        projectId=projectId.decode('utf8'),
-        datasetId=datasetId.decode('utf8'),
-        )
-    result = client.datasets.Get(
-        request, global_params=global_params)
-    print apitools_base_cli.FormatOutput(result)
-
-
-class DatasetsInsert(apitools_base_cli.NewCmd):
-  """Command wrapping datasets.Insert."""
-
-  usage = """datasets_insert <projectId>"""
-
-  def __init__(self, name, fv):
-    super(DatasetsInsert, self).__init__(name, fv)
-    flags.DEFINE_string(
-        'dataset',
-        None,
-        u'A Dataset resource to be passed as the request body.',
-        flag_values=fv)
-
-  def RunWithArgs(self, projectId):
-    """Creates a new empty dataset.
-
-    Args:
-      projectId: Project ID of the new dataset
-
-    Flags:
-      dataset: A Dataset resource to be passed as the request body.
-    """
-    client = GetClientFromFlags()
-    global_params = GetGlobalParamsFromFlags()
-    request = messages.BigqueryDatasetsInsertRequest(
-        projectId=projectId.decode('utf8'),
-        )
-    if FLAGS['dataset'].present:
-      request.dataset = apitools_base.JsonToMessage(messages.Dataset, FLAGS.dataset)
-    result = client.datasets.Insert(
-        request, global_params=global_params)
-    print apitools_base_cli.FormatOutput(result)
-
-
-class DatasetsList(apitools_base_cli.NewCmd):
-  """Command wrapping datasets.List."""
-
-  usage = """datasets_list <projectId>"""
-
-  def __init__(self, name, fv):
-    super(DatasetsList, self).__init__(name, fv)
-    flags.DEFINE_boolean(
-        'all',
-        None,
-        u'Whether to list all datasets, including hidden ones',
-        flag_values=fv)
-    flags.DEFINE_string(
-        'filter',
-        None,
-        u'An expression for filtering the results of the request by label. '
-        u'The syntax is "labels.[:]". Multiple filters can be ANDed together '
-        u'by connecting with a space. Example: "labels.department:receiving '
-        u'labels.active". See https://cloud.google.com/bigquery/docs'
-        u'/labeling-datasets#filtering_datasets_using_labels for details.',
-        flag_values=fv)
-    flags.DEFINE_integer(
-        'maxResults',
-        None,
-        u'The maximum number of results to return',
-        flag_values=fv)
-    flags.DEFINE_string(
-        'pageToken',
-        None,
-        u'Page token, returned by a previous call, to request the next page '
-        u'of results',
-        flag_values=fv)
-
-  def RunWithArgs(self, projectId):
-    """Lists all datasets in the specified project to which you have been
-    granted the READER dataset role.
-
-    Args:
-      projectId: Project ID of the datasets to be listed
-
-    Flags:
-      all: Whether to list all datasets, including hidden ones
-      filter: An expression for filtering the results of the request by label.
-        The syntax is "labels.[:]". Multiple filters can be ANDed together by
-        connecting with a space. Example: "labels.department:receiving
-        labels.active". See https://cloud.google.com/bigquery/docs/labeling-
-        datasets#filtering_datasets_using_labels for details.
-      maxResults: The maximum number of results to return
-      pageToken: Page token, returned by a previous call, to request the next
-        page of results
-    """
-    client = GetClientFromFlags()
-    global_params = GetGlobalParamsFromFlags()
-    request = messages.BigqueryDatasetsListRequest(
-        projectId=projectId.decode('utf8'),
-        )
-    if FLAGS['all'].present:
-      request.all = FLAGS.all
-    if FLAGS['filter'].present:
-      request.filter = FLAGS.filter.decode('utf8')
-    if FLAGS['maxResults'].present:
-      request.maxResults = FLAGS.maxResults
-    if FLAGS['pageToken'].present:
-      request.pageToken = FLAGS.pageToken.decode('utf8')
-    result = client.datasets.List(
-        request, global_params=global_params)
-    print apitools_base_cli.FormatOutput(result)
-
-
-class DatasetsPatch(apitools_base_cli.NewCmd):
-  """Command wrapping datasets.Patch."""
-
-  usage = """datasets_patch <projectId> <datasetId>"""
-
-  def __init__(self, name, fv):
-    super(DatasetsPatch, self).__init__(name, fv)
-    flags.DEFINE_string(
-        'dataset',
-        None,
-        u'A Dataset resource to be passed as the request body.',
-        flag_values=fv)
-
-  def RunWithArgs(self, projectId, datasetId):
-    """Updates information in an existing dataset. The update method replaces
-    the entire dataset resource, whereas the patch method only replaces fields
-    that are provided in the submitted dataset resource. This method supports
-    patch semantics.
-
-    Args:
-      projectId: Project ID of the dataset being updated
-      datasetId: Dataset ID of the dataset being updated
-
-    Flags:
-      dataset: A Dataset resource to be passed as the request body.
-    """
-    client = GetClientFromFlags()
-    global_params = GetGlobalParamsFromFlags()
-    request = messages.BigqueryDatasetsPatchRequest(
-        projectId=projectId.decode('utf8'),
-        datasetId=datasetId.decode('utf8'),
-        )
-    if FLAGS['dataset'].present:
-      request.dataset = apitools_base.JsonToMessage(messages.Dataset, FLAGS.dataset)
-    result = client.datasets.Patch(
-        request, global_params=global_params)
-    print apitools_base_cli.FormatOutput(result)
-
-
-class DatasetsUpdate(apitools_base_cli.NewCmd):
-  """Command wrapping datasets.Update."""
-
-  usage = """datasets_update <projectId> <datasetId>"""
-
-  def __init__(self, name, fv):
-    super(DatasetsUpdate, self).__init__(name, fv)
-    flags.DEFINE_string(
-        'dataset',
-        None,
-        u'A Dataset resource to be passed as the request body.',
-        flag_values=fv)
-
-  def RunWithArgs(self, projectId, datasetId):
-    """Updates information in an existing dataset. The update method replaces
-    the entire dataset resource, whereas the patch method only replaces fields
-    that are provided in the submitted dataset resource.
-
-    Args:
-      projectId: Project ID of the dataset being updated
-      datasetId: Dataset ID of the dataset being updated
-
-    Flags:
-      dataset: A Dataset resource to be passed as the request body.
-    """
-    client = GetClientFromFlags()
-    global_params = GetGlobalParamsFromFlags()
-    request = messages.BigqueryDatasetsUpdateRequest(
-        projectId=projectId.decode('utf8'),
-        datasetId=datasetId.decode('utf8'),
-        )
-    if FLAGS['dataset'].present:
-      request.dataset = apitools_base.JsonToMessage(messages.Dataset, FLAGS.dataset)
-    result = client.datasets.Update(
-        request, global_params=global_params)
-    print apitools_base_cli.FormatOutput(result)
-
-
-class JobsCancel(apitools_base_cli.NewCmd):
-  """Command wrapping jobs.Cancel."""
-
-  usage = """jobs_cancel <projectId> <jobId>"""
-
-  def __init__(self, name, fv):
-    super(JobsCancel, self).__init__(name, fv)
-
-  def RunWithArgs(self, projectId, jobId):
-    """Requests that a job be cancelled. This call will return immediately,
-    and the client will need to poll for the job status to see if the cancel
-    completed successfully. Cancelled jobs may still incur costs.
-
-    Args:
-      projectId: [Required] Project ID of the job to cancel
-      jobId: [Required] Job ID of the job to cancel
-    """
-    client = GetClientFromFlags()
-    global_params = GetGlobalParamsFromFlags()
-    request = messages.BigqueryJobsCancelRequest(
-        projectId=projectId.decode('utf8'),
-        jobId=jobId.decode('utf8'),
-        )
-    result = client.jobs.Cancel(
-        request, global_params=global_params)
-    print apitools_base_cli.FormatOutput(result)
-
-
-class JobsGet(apitools_base_cli.NewCmd):
-  """Command wrapping jobs.Get."""
-
-  usage = """jobs_get <projectId> <jobId>"""
-
-  def __init__(self, name, fv):
-    super(JobsGet, self).__init__(name, fv)
-
-  def RunWithArgs(self, projectId, jobId):
-    """Returns information about a specific job. Job information is available
-    for a six month period after creation. Requires that you're the person who
-    ran the job, or have the Is Owner project role.
-
-    Args:
-      projectId: [Required] Project ID of the requested job
-      jobId: [Required] Job ID of the requested job
-    """
-    client = GetClientFromFlags()
-    global_params = GetGlobalParamsFromFlags()
-    request = messages.BigqueryJobsGetRequest(
-        projectId=projectId.decode('utf8'),
-        jobId=jobId.decode('utf8'),
-        )
-    result = client.jobs.Get(
-        request, global_params=global_params)
-    print apitools_base_cli.FormatOutput(result)
-
-
-class JobsGetQueryResults(apitools_base_cli.NewCmd):
-  """Command wrapping jobs.GetQueryResults."""
-
-  usage = """jobs_getQueryResults <projectId> <jobId>"""
-
-  def __init__(self, name, fv):
-    super(JobsGetQueryResults, self).__init__(name, fv)
-    flags.DEFINE_integer(
-        'maxResults',
-        None,
-        u'Maximum number of results to read',
-        flag_values=fv)
-    flags.DEFINE_string(
-        'pageToken',
-        None,
-        u'Page token, returned by a previous call, to request the next page '
-        u'of results',
-        flag_values=fv)
-    flags.DEFINE_string(
-        'startIndex',
-        None,
-        u'Zero-based index of the starting row',
-        flag_values=fv)
-    flags.DEFINE_integer(
-        'timeoutMs',
-        None,
-        u'How long to wait for the query to complete, in milliseconds, before'
-        u' returning. Default is 10 seconds. If the timeout passes before the'
-        u" job completes, the 'jobComplete' field in the response will be "
-        u'false',
-        flag_values=fv)
-
-  def RunWithArgs(self, projectId, jobId):
-    """Retrieves the results of a query job.
-
-    Args:
-      projectId: [Required] Project ID of the query job
-      jobId: [Required] Job ID of the query job
-
-    Flags:
-      maxResults: Maximum number of results to read
-      pageToken: Page token, returned by a previous call, to request the next
-        page of results
-      startIndex: Zero-based index of the starting row
-      timeoutMs: How long to wait for the query to complete, in milliseconds,
-        before returning. Default is 10 seconds. If the timeout passes before
-        the job completes, the 'jobComplete' field in the response will be
-        false
-    """
-    client = GetClientFromFlags()
-    global_params = GetGlobalParamsFromFlags()
-    request = messages.BigqueryJobsGetQueryResultsRequest(
-        projectId=projectId.decode('utf8'),
-        jobId=jobId.decode('utf8'),
-        )
-    if FLAGS['maxResults'].present:
-      request.maxResults = FLAGS.maxResults
-    if FLAGS['pageToken'].present:
-      request.pageToken = FLAGS.pageToken.decode('utf8')
-    if FLAGS['startIndex'].present:
-      request.startIndex = int(FLAGS.startIndex)
-    if FLAGS['timeoutMs'].present:
-      request.timeoutMs = FLAGS.timeoutMs
-    result = client.jobs.GetQueryResults(
-        request, global_params=global_params)
-    print apitools_base_cli.FormatOutput(result)
-
-
-class JobsInsert(apitools_base_cli.NewCmd):
-  """Command wrapping jobs.Insert."""
-
-  usage = """jobs_insert <projectId>"""
-
-  def __init__(self, name, fv):
-    super(JobsInsert, self).__init__(name, fv)
-    flags.DEFINE_string(
-        'job',
-        None,
-        u'A Job resource to be passed as the request body.',
-        flag_values=fv)
-    flags.DEFINE_string(
-        'upload_filename',
-        '',
-        'Filename to use for upload.',
-        flag_values=fv)
-    flags.DEFINE_string(
-        'upload_mime_type',
-        '',
-        'MIME type to use for the upload. Only needed if the extension on '
-        '--upload_filename does not determine the correct (or any) MIME '
-        'type.',
-        flag_values=fv)
-
-  def RunWithArgs(self, projectId):
-    """Starts a new asynchronous job. Requires the Can View project role.
-
-    Args:
-      projectId: Project ID of the project that will be billed for the job
-
-    Flags:
-      job: A Job resource to be passed as the request body.
-      upload_filename: Filename to use for upload.
-      upload_mime_type: MIME type to use for the upload. Only needed if the
-        extension on --upload_filename does not determine the correct (or any)
-        MIME type.
-    """
-    client = GetClientFromFlags()
-    global_params = GetGlobalParamsFromFlags()
-    request = messages.BigqueryJobsInsertRequest(
-        projectId=projectId.decode('utf8'),
-        )
-    if FLAGS['job'].present:
-      request.job = apitools_base.JsonToMessage(messages.Job, FLAGS.job)
-    upload = None
-    if FLAGS.upload_filename:
-      upload = apitools_base.Upload.FromFile(
-          FLAGS.upload_filename, FLAGS.upload_mime_type,
-          progress_callback=apitools_base.UploadProgressPrinter,
-          finish_callback=apitools_base.UploadCompletePrinter)
-    result = client.jobs.Insert(
-        request, global_params=global_params, upload=upload)
-    print apitools_base_cli.FormatOutput(result)
-
-
-class JobsList(apitools_base_cli.NewCmd):
-  """Command wrapping jobs.List."""
-
-  usage = """jobs_list <projectId>"""
-
-  def __init__(self, name, fv):
-    super(JobsList, self).__init__(name, fv)
-    flags.DEFINE_boolean(
-        'allUsers',
-        None,
-        u'Whether to display jobs owned by all users in the project. Default '
-        u'false',
-        flag_values=fv)
-    flags.DEFINE_integer(
-        'maxResults',
-        None,
-        u'Maximum number of results to return',
-        flag_values=fv)
-    flags.DEFINE_string(
-        'pageToken',
-        None,
-        u'Page token, returned by a previous call, to request the next page '
-        u'of results',
-        flag_values=fv)
-    flags.DEFINE_enum(
-        'projection',
-        u'full',
-        [u'full', u'minimal'],
-        u'Restrict information returned to a set of selected fields',
-        flag_values=fv)
-    flags.DEFINE_enum(
-        'stateFilter',
-        u'done',
-        [u'done', u'pending', u'running'],
-        u'Filter for job state',
-        flag_values=fv)
-
-  def RunWithArgs(self, projectId):
-    """Lists all jobs that you started in the specified project. Job
-    information is available for a six month period after creation. The job
-    list is sorted in reverse chronological order, by job creation time.
-    Requires the Can View project role, or the Is Owner project role if you
-    set the allUsers property.
-
-    Args:
-      projectId: Project ID of the jobs to list
-
-    Flags:
-      allUsers: Whether to display jobs owned by all users in the project.
-        Default false
-      maxResults: Maximum number of results to return
-      pageToken: Page token, returned by a previous call, to request the next
-        page of results
-      projection: Restrict information returned to a set of selected fields
-      stateFilter: Filter for job state
-    """
-    client = GetClientFromFlags()
-    global_params = GetGlobalParamsFromFlags()
-    request = messages.BigqueryJobsListRequest(
-        projectId=projectId.decode('utf8'),
-        )
-    if FLAGS['allUsers'].present:
-      request.allUsers = FLAGS.allUsers
-    if FLAGS['maxResults'].present:
-      request.maxResults = FLAGS.maxResults
-    if FLAGS['pageToken'].present:
-      request.pageToken = FLAGS.pageToken.decode('utf8')
-    if FLAGS['projection'].present:
-      request.projection = messages.BigqueryJobsListRequest.ProjectionValueValuesEnum(FLAGS.projection)
-    if FLAGS['stateFilter'].present:
-      request.stateFilter = [messages.BigqueryJobsListRequest.StateFilterValueValuesEnum(x) for x in FLAGS.stateFilter]
-    result = client.jobs.List(
-        request, global_params=global_params)
-    print apitools_base_cli.FormatOutput(result)
-
-
-class JobsQuery(apitools_base_cli.NewCmd):
-  """Command wrapping jobs.Query."""
-
-  usage = """jobs_query <projectId>"""
-
-  def __init__(self, name, fv):
-    super(JobsQuery, self).__init__(name, fv)
-    flags.DEFINE_string(
-        'queryRequest',
-        None,
-        u'A QueryRequest resource to be passed as the request body.',
-        flag_values=fv)
-
-  def RunWithArgs(self, projectId):
-    """Runs a BigQuery SQL query synchronously and returns query results if
-    the query completes within a specified timeout.
-
-    Args:
-      projectId: Project ID of the project billed for the query
-
-    Flags:
-      queryRequest: A QueryRequest resource to be passed as the request body.
-    """
-    client = GetClientFromFlags()
-    global_params = GetGlobalParamsFromFlags()
-    request = messages.BigqueryJobsQueryRequest(
-        projectId=projectId.decode('utf8'),
-        )
-    if FLAGS['queryRequest'].present:
-      request.queryRequest = apitools_base.JsonToMessage(messages.QueryRequest, FLAGS.queryRequest)
-    result = client.jobs.Query(
-        request, global_params=global_params)
-    print apitools_base_cli.FormatOutput(result)
-
-
-class ProjectsList(apitools_base_cli.NewCmd):
-  """Command wrapping projects.List."""
-
-  usage = """projects_list"""
-
-  def __init__(self, name, fv):
-    super(ProjectsList, self).__init__(name, fv)
-    flags.DEFINE_integer(
-        'maxResults',
-        None,
-        u'Maximum number of results to return',
-        flag_values=fv)
-    flags.DEFINE_string(
-        'pageToken',
-        None,
-        u'Page token, returned by a previous call, to request the next page '
-        u'of results',
-        flag_values=fv)
-
-  def RunWithArgs(self):
-    """Lists all projects to which you have been granted any project role.
-
-    Flags:
-      maxResults: Maximum number of results to return
-      pageToken: Page token, returned by a previous call, to request the next
-        page of results
-    """
-    client = GetClientFromFlags()
-    global_params = GetGlobalParamsFromFlags()
-    request = messages.BigqueryProjectsListRequest(
-        )
-    if FLAGS['maxResults'].present:
-      request.maxResults = FLAGS.maxResults
-    if FLAGS['pageToken'].present:
-      request.pageToken = FLAGS.pageToken.decode('utf8')
-    result = client.projects.List(
-        request, global_params=global_params)
-    print apitools_base_cli.FormatOutput(result)
-
-
-class TabledataInsertAll(apitools_base_cli.NewCmd):
-  """Command wrapping tabledata.InsertAll."""
-
-  usage = """tabledata_insertAll <projectId> <datasetId> <tableId>"""
-
-  def __init__(self, name, fv):
-    super(TabledataInsertAll, self).__init__(name, fv)
-    flags.DEFINE_string(
-        'tableDataInsertAllRequest',
-        None,
-        u'A TableDataInsertAllRequest resource to be passed as the request '
-        u'body.',
-        flag_values=fv)
-
-  def RunWithArgs(self, projectId, datasetId, tableId):
-    """Streams data into BigQuery one record at a time without needing to run
-    a load job. Requires the WRITER dataset role.
-
-    Args:
-      projectId: Project ID of the destination table.
-      datasetId: Dataset ID of the destination table.
-      tableId: Table ID of the destination table.
-
-    Flags:
-      tableDataInsertAllRequest: A TableDataInsertAllRequest resource to be
-        passed as the request body.
-    """
-    client = GetClientFromFlags()
-    global_params = GetGlobalParamsFromFlags()
-    request = messages.BigqueryTabledataInsertAllRequest(
-        projectId=projectId.decode('utf8'),
-        datasetId=datasetId.decode('utf8'),
-        tableId=tableId.decode('utf8'),
-        )
-    if FLAGS['tableDataInsertAllRequest'].present:
-      request.tableDataInsertAllRequest = apitools_base.JsonToMessage(messages.TableDataInsertAllRequest, FLAGS.tableDataInsertAllRequest)
-    result = client.tabledata.InsertAll(
-        request, global_params=global_params)
-    print apitools_base_cli.FormatOutput(result)
-
-
-class TabledataList(apitools_base_cli.NewCmd):
-  """Command wrapping tabledata.List."""
-
-  usage = """tabledata_list <projectId> <datasetId> <tableId>"""
-
-  def __init__(self, name, fv):
-    super(TabledataList, self).__init__(name, fv)
-    flags.DEFINE_integer(
-        'maxResults',
-        None,
-        u'Maximum number of results to return',
-        flag_values=fv)
-    flags.DEFINE_string(
-        'pageToken',
-        None,
-        u'Page token, returned by a previous call, identifying the result set',
-        flag_values=fv)
-    flags.DEFINE_string(
-        'startIndex',
-        None,
-        u'Zero-based index of the starting row to read',
-        flag_values=fv)
-
-  def RunWithArgs(self, projectId, datasetId, tableId):
-    """Retrieves table data from a specified set of rows. Requires the READER
-    dataset role.
-
-    Args:
-      projectId: Project ID of the table to read
-      datasetId: Dataset ID of the table to read
-      tableId: Table ID of the table to read
-
-    Flags:
-      maxResults: Maximum number of results to return
-      pageToken: Page token, returned by a previous call, identifying the
-        result set
-      startIndex: Zero-based index of the starting row to read
-    """
-    client = GetClientFromFlags()
-    global_params = GetGlobalParamsFromFlags()
-    request = messages.BigqueryTabledataListRequest(
-        projectId=projectId.decode('utf8'),
-        datasetId=datasetId.decode('utf8'),
-        tableId=tableId.decode('utf8'),
-        )
-    if FLAGS['maxResults'].present:
-      request.maxResults = FLAGS.maxResults
-    if FLAGS['pageToken'].present:
-      request.pageToken = FLAGS.pageToken.decode('utf8')
-    if FLAGS['startIndex'].present:
-      request.startIndex = int(FLAGS.startIndex)
-    result = client.tabledata.List(
-        request, global_params=global_params)
-    print apitools_base_cli.FormatOutput(result)
-
-
-class TablesDelete(apitools_base_cli.NewCmd):
-  """Command wrapping tables.Delete."""
-
-  usage = """tables_delete <projectId> <datasetId> <tableId>"""
-
-  def __init__(self, name, fv):
-    super(TablesDelete, self).__init__(name, fv)
-
-  def RunWithArgs(self, projectId, datasetId, tableId):
-    """Deletes the table specified by tableId from the dataset. If the table
-    contains data, all the data will be deleted.
-
-    Args:
-      projectId: Project ID of the table to delete
-      datasetId: Dataset ID of the table to delete
-      tableId: Table ID of the table to delete
-    """
-    client = GetClientFromFlags()
-    global_params = GetGlobalParamsFromFlags()
-    request = messages.BigqueryTablesDeleteRequest(
-        projectId=projectId.decode('utf8'),
-        datasetId=datasetId.decode('utf8'),
-        tableId=tableId.decode('utf8'),
-        )
-    result = client.tables.Delete(
-        request, global_params=global_params)
-    print apitools_base_cli.FormatOutput(result)
-
-
-class TablesGet(apitools_base_cli.NewCmd):
-  """Command wrapping tables.Get."""
-
-  usage = """tables_get <projectId> <datasetId> <tableId>"""
-
-  def __init__(self, name, fv):
-    super(TablesGet, self).__init__(name, fv)
-
-  def RunWithArgs(self, projectId, datasetId, tableId):
-    """Gets the specified table resource by table ID. This method does not
-    return the data in the table, it only returns the table resource, which
-    describes the structure of this table.
-
-    Args:
-      projectId: Project ID of the requested table
-      datasetId: Dataset ID of the requested table
-      tableId: Table ID of the requested table
-    """
-    client = GetClientFromFlags()
-    global_params = GetGlobalParamsFromFlags()
-    request = messages.BigqueryTablesGetRequest(
-        projectId=projectId.decode('utf8'),
-        datasetId=datasetId.decode('utf8'),
-        tableId=tableId.decode('utf8'),
-        )
-    result = client.tables.Get(
-        request, global_params=global_params)
-    print apitools_base_cli.FormatOutput(result)
-
-
-class TablesInsert(apitools_base_cli.NewCmd):
-  """Command wrapping tables.Insert."""
-
-  usage = """tables_insert <projectId> <datasetId>"""
-
-  def __init__(self, name, fv):
-    super(TablesInsert, self).__init__(name, fv)
-    flags.DEFINE_string(
-        'table',
-        None,
-        u'A Table resource to be passed as the request body.',
-        flag_values=fv)
-
-  def RunWithArgs(self, projectId, datasetId):
-    """Creates a new, empty table in the dataset.
-
-    Args:
-      projectId: Project ID of the new table
-      datasetId: Dataset ID of the new table
-
-    Flags:
-      table: A Table resource to be passed as the request body.
-    """
-    client = GetClientFromFlags()
-    global_params = GetGlobalParamsFromFlags()
-    request = messages.BigqueryTablesInsertRequest(
-        projectId=projectId.decode('utf8'),
-        datasetId=datasetId.decode('utf8'),
-        )
-    if FLAGS['table'].present:
-      request.table = apitools_base.JsonToMessage(messages.Table, FLAGS.table)
-    result = client.tables.Insert(
-        request, global_params=global_params)
-    print apitools_base_cli.FormatOutput(result)
-
-
-class TablesList(apitools_base_cli.NewCmd):
-  """Command wrapping tables.List."""
-
-  usage = """tables_list <projectId> <datasetId>"""
-
-  def __init__(self, name, fv):
-    super(TablesList, self).__init__(name, fv)
-    flags.DEFINE_integer(
-        'maxResults',
-        None,
-        u'Maximum number of results to return',
-        flag_values=fv)
-    flags.DEFINE_string(
-        'pageToken',
-        None,
-        u'Page token, returned by a previous call, to request the next page '
-        u'of results',
-        flag_values=fv)
-
-  def RunWithArgs(self, projectId, datasetId):
-    """Lists all tables in the specified dataset. Requires the READER dataset
-    role.
-
-    Args:
-      projectId: Project ID of the tables to list
-      datasetId: Dataset ID of the tables to list
-
-    Flags:
-      maxResults: Maximum number of results to return
-      pageToken: Page token, returned by a previous call, to request the next
-        page of results
-    """
-    client = GetClientFromFlags()
-    global_params = GetGlobalParamsFromFlags()
-    request = messages.BigqueryTablesListRequest(
-        projectId=projectId.decode('utf8'),
-        datasetId=datasetId.decode('utf8'),
-        )
-    if FLAGS['maxResults'].present:
-      request.maxResults = FLAGS.maxResults
-    if FLAGS['pageToken'].present:
-      request.pageToken = FLAGS.pageToken.decode('utf8')
-    result = client.tables.List(
-        request, global_params=global_params)
-    print apitools_base_cli.FormatOutput(result)
-
-
-class TablesPatch(apitools_base_cli.NewCmd):
-  """Command wrapping tables.Patch."""
-
-  usage = """tables_patch <projectId> <datasetId> <tableId>"""
-
-  def __init__(self, name, fv):
-    super(TablesPatch, self).__init__(name, fv)
-    flags.DEFINE_string(
-        'table',
-        None,
-        u'A Table resource to be passed as the request body.',
-        flag_values=fv)
-
-  def RunWithArgs(self, projectId, datasetId, tableId):
-    """Updates information in an existing table. The update method replaces
-    the entire table resource, whereas the patch method only replaces fields
-    that are provided in the submitted table resource. This method supports
-    patch semantics.
-
-    Args:
-      projectId: Project ID of the table to update
-      datasetId: Dataset ID of the table to update
-      tableId: Table ID of the table to update
-
-    Flags:
-      table: A Table resource to be passed as the request body.
-    """
-    client = GetClientFromFlags()
-    global_params = GetGlobalParamsFromFlags()
-    request = messages.BigqueryTablesPatchRequest(
-        projectId=projectId.decode('utf8'),
-        datasetId=datasetId.decode('utf8'),
-        tableId=tableId.decode('utf8'),
-        )
-    if FLAGS['table'].present:
-      request.table = apitools_base.JsonToMessage(messages.Table, FLAGS.table)
-    result = client.tables.Patch(
-        request, global_params=global_params)
-    print apitools_base_cli.FormatOutput(result)
-
-
-class TablesUpdate(apitools_base_cli.NewCmd):
-  """Command wrapping tables.Update."""
-
-  usage = """tables_update <projectId> <datasetId> <tableId>"""
-
-  def __init__(self, name, fv):
-    super(TablesUpdate, self).__init__(name, fv)
-    flags.DEFINE_string(
-        'table',
-        None,
-        u'A Table resource to be passed as the request body.',
-        flag_values=fv)
-
-  def RunWithArgs(self, projectId, datasetId, tableId):
-    """Updates information in an existing table. The update method replaces
-    the entire table resource, whereas the patch method only replaces fields
-    that are provided in the submitted table resource.
-
-    Args:
-      projectId: Project ID of the table to update
-      datasetId: Dataset ID of the table to update
-      tableId: Table ID of the table to update
-
-    Flags:
-      table: A Table resource to be passed as the request body.
-    """
-    client = GetClientFromFlags()
-    global_params = GetGlobalParamsFromFlags()
-    request = messages.BigqueryTablesUpdateRequest(
-        projectId=projectId.decode('utf8'),
-        datasetId=datasetId.decode('utf8'),
-        tableId=tableId.decode('utf8'),
-        )
-    if FLAGS['table'].present:
-      request.table = apitools_base.JsonToMessage(messages.Table, FLAGS.table)
-    result = client.tables.Update(
-        request, global_params=global_params)
-    print apitools_base_cli.FormatOutput(result)
-
-
-def main(_):
-  appcommands.AddCmd('pyshell', PyShell)
-  appcommands.AddCmd('datasets_delete', DatasetsDelete)
-  appcommands.AddCmd('datasets_get', DatasetsGet)
-  appcommands.AddCmd('datasets_insert', DatasetsInsert)
-  appcommands.AddCmd('datasets_list', DatasetsList)
-  appcommands.AddCmd('datasets_patch', DatasetsPatch)
-  appcommands.AddCmd('datasets_update', DatasetsUpdate)
-  appcommands.AddCmd('jobs_cancel', JobsCancel)
-  appcommands.AddCmd('jobs_get', JobsGet)
-  appcommands.AddCmd('jobs_getQueryResults', JobsGetQueryResults)
-  appcommands.AddCmd('jobs_insert', JobsInsert)
-  appcommands.AddCmd('jobs_list', JobsList)
-  appcommands.AddCmd('jobs_query', JobsQuery)
-  appcommands.AddCmd('projects_list', ProjectsList)
-  appcommands.AddCmd('tabledata_insertAll', TabledataInsertAll)
-  appcommands.AddCmd('tabledata_list', TabledataList)
-  appcommands.AddCmd('tables_delete', TablesDelete)
-  appcommands.AddCmd('tables_get', TablesGet)
-  appcommands.AddCmd('tables_insert', TablesInsert)
-  appcommands.AddCmd('tables_list', TablesList)
-  appcommands.AddCmd('tables_patch', TablesPatch)
-  appcommands.AddCmd('tables_update', TablesUpdate)
-
-  apitools_base_cli.SetupLogger()
-  if hasattr(appcommands, 'SetDefaultCommand'):
-    appcommands.SetDefaultCommand('pyshell')
-
-
-run_main = apitools_base_cli.run_main
-
-if __name__ == '__main__':
-  appcommands.Run()
diff --git a/samples/bigquery_sample/bigquery_v2/bigquery_v2_client.py b/samples/bigquery_sample/bigquery_v2/bigquery_v2_client.py
index 363f470..e6cf9c8 100644
--- a/samples/bigquery_sample/bigquery_v2/bigquery_v2_client.py
+++ b/samples/bigquery_sample/bigquery_v2/bigquery_v2_client.py
@@ -24,7 +24,7 @@
                get_credentials=True, http=None, model=None,
                log_request=False, log_response=False,
                credentials_args=None, default_global_params=None,
-               additional_http_headers=None):
+               additional_http_headers=None, response_encoding=None):
     """Create a new bigquery handle."""
     url = url or self.BASE_URL
     super(BigqueryV2, self).__init__(
@@ -33,7 +33,8 @@
         log_request=log_request, log_response=log_response,
         credentials_args=credentials_args,
         default_global_params=default_global_params,
-        additional_http_headers=additional_http_headers)
+        additional_http_headers=additional_http_headers,
+        response_encoding=response_encoding)
     self.datasets = self.DatasetsService(self)
     self.jobs = self.JobsService(self)
     self.projects = self.ProjectsService(self)
@@ -51,7 +52,7 @@
           }
 
     def Delete(self, request, global_params=None):
-      """Deletes the dataset specified by the datasetId value. Before you can delete a dataset, you must delete all its tables, either manually or by specifying deleteContents. Immediately after deletion, you can create another dataset with the same name.
+      r"""Deletes the dataset specified by the datasetId value. Before you can delete a dataset, you must delete all its tables, either manually or by specifying deleteContents. Immediately after deletion, you can create another dataset with the same name.
 
       Args:
         request: (BigqueryDatasetsDeleteRequest) input message
@@ -77,7 +78,7 @@
     )
 
     def Get(self, request, global_params=None):
-      """Returns the dataset specified by datasetID.
+      r"""Returns the dataset specified by datasetID.
 
       Args:
         request: (BigqueryDatasetsGetRequest) input message
@@ -103,7 +104,7 @@
     )
 
     def Insert(self, request, global_params=None):
-      """Creates a new empty dataset.
+      r"""Creates a new empty dataset.
 
       Args:
         request: (BigqueryDatasetsInsertRequest) input message
@@ -129,7 +130,7 @@
     )
 
     def List(self, request, global_params=None):
-      """Lists all datasets in the specified project to which you have been granted the READER dataset role.
+      r"""Lists all datasets in the specified project to which you have been granted the READER dataset role.
 
       Args:
         request: (BigqueryDatasetsListRequest) input message
@@ -155,7 +156,7 @@
     )
 
     def Patch(self, request, global_params=None):
-      """Updates information in an existing dataset. The update method replaces the entire dataset resource, whereas the patch method only replaces fields that are provided in the submitted dataset resource. This method supports patch semantics.
+      r"""Updates information in an existing dataset. The update method replaces the entire dataset resource, whereas the patch method only replaces fields that are provided in the submitted dataset resource. This method supports patch semantics.
 
       Args:
         request: (BigqueryDatasetsPatchRequest) input message
@@ -181,7 +182,7 @@
     )
 
     def Update(self, request, global_params=None):
-      """Updates information in an existing dataset. The update method replaces the entire dataset resource, whereas the patch method only replaces fields that are provided in the submitted dataset resource.
+      r"""Updates information in an existing dataset. The update method replaces the entire dataset resource, whereas the patch method only replaces fields that are provided in the submitted dataset resource.
 
       Args:
         request: (BigqueryDatasetsUpdateRequest) input message
@@ -225,7 +226,7 @@
           }
 
     def Cancel(self, request, global_params=None):
-      """Requests that a job be cancelled. This call will return immediately, and the client will need to poll for the job status to see if the cancel completed successfully. Cancelled jobs may still incur costs.
+      r"""Requests that a job be cancelled. This call will return immediately, and the client will need to poll for the job status to see if the cancel completed successfully. Cancelled jobs may still incur costs.
 
       Args:
         request: (BigqueryJobsCancelRequest) input message
@@ -251,7 +252,7 @@
     )
 
     def Get(self, request, global_params=None):
-      """Returns information about a specific job. Job information is available for a six month period after creation. Requires that you're the person who ran the job, or have the Is Owner project role.
+      r"""Returns information about a specific job. Job information is available for a six month period after creation. Requires that you're the person who ran the job, or have the Is Owner project role.
 
       Args:
         request: (BigqueryJobsGetRequest) input message
@@ -277,7 +278,7 @@
     )
 
     def GetQueryResults(self, request, global_params=None):
-      """Retrieves the results of a query job.
+      r"""Retrieves the results of a query job.
 
       Args:
         request: (BigqueryJobsGetQueryResultsRequest) input message
@@ -303,7 +304,7 @@
     )
 
     def Insert(self, request, global_params=None, upload=None):
-      """Starts a new asynchronous job. Requires the Can View project role.
+      r"""Starts a new asynchronous job. Requires the Can View project role.
 
       Args:
         request: (BigqueryJobsInsertRequest) input message
@@ -333,7 +334,7 @@
     )
 
     def List(self, request, global_params=None):
-      """Lists all jobs that you started in the specified project. Job information is available for a six month period after creation. The job list is sorted in reverse chronological order, by job creation time. Requires the Can View project role, or the Is Owner project role if you set the allUsers property.
+      r"""Lists all jobs that you started in the specified project. Job information is available for a six month period after creation. The job list is sorted in reverse chronological order, by job creation time. Requires the Can View project role, or the Is Owner project role if you set the allUsers property.
 
       Args:
         request: (BigqueryJobsListRequest) input message
@@ -359,7 +360,7 @@
     )
 
     def Query(self, request, global_params=None):
-      """Runs a BigQuery SQL query synchronously and returns query results if the query completes within a specified timeout.
+      r"""Runs a BigQuery SQL query synchronously and returns query results if the query completes within a specified timeout.
 
       Args:
         request: (BigqueryJobsQueryRequest) input message
@@ -395,7 +396,7 @@
           }
 
     def List(self, request, global_params=None):
-      """Lists all projects to which you have been granted any project role.
+      r"""Lists all projects to which you have been granted any project role.
 
       Args:
         request: (BigqueryProjectsListRequest) input message
@@ -431,7 +432,7 @@
           }
 
     def InsertAll(self, request, global_params=None):
-      """Streams data into BigQuery one record at a time without needing to run a load job. Requires the WRITER dataset role.
+      r"""Streams data into BigQuery one record at a time without needing to run a load job. Requires the WRITER dataset role.
 
       Args:
         request: (BigqueryTabledataInsertAllRequest) input message
@@ -457,7 +458,7 @@
     )
 
     def List(self, request, global_params=None):
-      """Retrieves table data from a specified set of rows. Requires the READER dataset role.
+      r"""Retrieves table data from a specified set of rows. Requires the READER dataset role.
 
       Args:
         request: (BigqueryTabledataListRequest) input message
@@ -493,7 +494,7 @@
           }
 
     def Delete(self, request, global_params=None):
-      """Deletes the table specified by tableId from the dataset. If the table contains data, all the data will be deleted.
+      r"""Deletes the table specified by tableId from the dataset. If the table contains data, all the data will be deleted.
 
       Args:
         request: (BigqueryTablesDeleteRequest) input message
@@ -519,7 +520,7 @@
     )
 
     def Get(self, request, global_params=None):
-      """Gets the specified table resource by table ID. This method does not return the data in the table, it only returns the table resource, which describes the structure of this table.
+      r"""Gets the specified table resource by table ID. This method does not return the data in the table, it only returns the table resource, which describes the structure of this table.
 
       Args:
         request: (BigqueryTablesGetRequest) input message
@@ -545,7 +546,7 @@
     )
 
     def Insert(self, request, global_params=None):
-      """Creates a new, empty table in the dataset.
+      r"""Creates a new, empty table in the dataset.
 
       Args:
         request: (BigqueryTablesInsertRequest) input message
@@ -571,7 +572,7 @@
     )
 
     def List(self, request, global_params=None):
-      """Lists all tables in the specified dataset. Requires the READER dataset role.
+      r"""Lists all tables in the specified dataset. Requires the READER dataset role.
 
       Args:
         request: (BigqueryTablesListRequest) input message
@@ -597,7 +598,7 @@
     )
 
     def Patch(self, request, global_params=None):
-      """Updates information in an existing table. The update method replaces the entire table resource, whereas the patch method only replaces fields that are provided in the submitted table resource. This method supports patch semantics.
+      r"""Updates information in an existing table. The update method replaces the entire table resource, whereas the patch method only replaces fields that are provided in the submitted table resource. This method supports patch semantics.
 
       Args:
         request: (BigqueryTablesPatchRequest) input message
@@ -623,7 +624,7 @@
     )
 
     def Update(self, request, global_params=None):
-      """Updates information in an existing table. The update method replaces the entire table resource, whereas the patch method only replaces fields that are provided in the submitted table resource.
+      r"""Updates information in an existing table. The update method replaces the entire table resource, whereas the patch method only replaces fields that are provided in the submitted table resource.
 
       Args:
         request: (BigqueryTablesUpdateRequest) input message
diff --git a/samples/bigquery_sample/bigquery_v2/bigquery_v2_messages.py b/samples/bigquery_sample/bigquery_v2/bigquery_v2_messages.py
index 9b68f9c..63a0351 100644
--- a/samples/bigquery_sample/bigquery_v2/bigquery_v2_messages.py
+++ b/samples/bigquery_sample/bigquery_v2/bigquery_v2_messages.py
@@ -13,7 +13,7 @@
 
 
 class BigqueryDatasetsDeleteRequest(_messages.Message):
-  """A BigqueryDatasetsDeleteRequest object.
+  r"""A BigqueryDatasetsDeleteRequest object.
 
   Fields:
     datasetId: Dataset ID of dataset being deleted
@@ -28,11 +28,11 @@
 
 
 class BigqueryDatasetsDeleteResponse(_messages.Message):
-  """An empty BigqueryDatasetsDelete response."""
+  r"""An empty BigqueryDatasetsDelete response."""
 
 
 class BigqueryDatasetsGetRequest(_messages.Message):
-  """A BigqueryDatasetsGetRequest object.
+  r"""A BigqueryDatasetsGetRequest object.
 
   Fields:
     datasetId: Dataset ID of the requested dataset
@@ -44,7 +44,7 @@
 
 
 class BigqueryDatasetsInsertRequest(_messages.Message):
-  """A BigqueryDatasetsInsertRequest object.
+  r"""A BigqueryDatasetsInsertRequest object.
 
   Fields:
     dataset: A Dataset resource to be passed as the request body.
@@ -56,7 +56,7 @@
 
 
 class BigqueryDatasetsListRequest(_messages.Message):
-  """A BigqueryDatasetsListRequest object.
+  r"""A BigqueryDatasetsListRequest object.
 
   Fields:
     all: Whether to list all datasets, including hidden ones
@@ -79,7 +79,7 @@
 
 
 class BigqueryDatasetsPatchRequest(_messages.Message):
-  """A BigqueryDatasetsPatchRequest object.
+  r"""A BigqueryDatasetsPatchRequest object.
 
   Fields:
     dataset: A Dataset resource to be passed as the request body.
@@ -93,7 +93,7 @@
 
 
 class BigqueryDatasetsUpdateRequest(_messages.Message):
-  """A BigqueryDatasetsUpdateRequest object.
+  r"""A BigqueryDatasetsUpdateRequest object.
 
   Fields:
     dataset: A Dataset resource to be passed as the request body.
@@ -107,7 +107,7 @@
 
 
 class BigqueryJobsCancelRequest(_messages.Message):
-  """A BigqueryJobsCancelRequest object.
+  r"""A BigqueryJobsCancelRequest object.
 
   Fields:
     jobId: [Required] Job ID of the job to cancel
@@ -119,7 +119,7 @@
 
 
 class BigqueryJobsGetQueryResultsRequest(_messages.Message):
-  """A BigqueryJobsGetQueryResultsRequest object.
+  r"""A BigqueryJobsGetQueryResultsRequest object.
 
   Fields:
     jobId: [Required] Job ID of the query job
@@ -142,7 +142,7 @@
 
 
 class BigqueryJobsGetRequest(_messages.Message):
-  """A BigqueryJobsGetRequest object.
+  r"""A BigqueryJobsGetRequest object.
 
   Fields:
     jobId: [Required] Job ID of the requested job
@@ -154,7 +154,7 @@
 
 
 class BigqueryJobsInsertRequest(_messages.Message):
-  """A BigqueryJobsInsertRequest object.
+  r"""A BigqueryJobsInsertRequest object.
 
   Fields:
     job: A Job resource to be passed as the request body.
@@ -166,7 +166,7 @@
 
 
 class BigqueryJobsListRequest(_messages.Message):
-  """A BigqueryJobsListRequest object.
+  r"""A BigqueryJobsListRequest object.
 
   Enums:
     ProjectionValueValuesEnum: Restrict information returned to a set of
@@ -185,7 +185,7 @@
   """
 
   class ProjectionValueValuesEnum(_messages.Enum):
-    """Restrict information returned to a set of selected fields
+    r"""Restrict information returned to a set of selected fields
 
     Values:
       full: Includes all job data
@@ -195,7 +195,7 @@
     minimal = 1
 
   class StateFilterValueValuesEnum(_messages.Enum):
-    """Filter for job state
+    r"""Filter for job state
 
     Values:
       done: Finished jobs
@@ -215,7 +215,7 @@
 
 
 class BigqueryJobsQueryRequest(_messages.Message):
-  """A BigqueryJobsQueryRequest object.
+  r"""A BigqueryJobsQueryRequest object.
 
   Fields:
     projectId: Project ID of the project billed for the query
@@ -227,7 +227,7 @@
 
 
 class BigqueryProjectsListRequest(_messages.Message):
-  """A BigqueryProjectsListRequest object.
+  r"""A BigqueryProjectsListRequest object.
 
   Fields:
     maxResults: Maximum number of results to return
@@ -240,7 +240,7 @@
 
 
 class BigqueryTabledataInsertAllRequest(_messages.Message):
-  """A BigqueryTabledataInsertAllRequest object.
+  r"""A BigqueryTabledataInsertAllRequest object.
 
   Fields:
     datasetId: Dataset ID of the destination table.
@@ -257,7 +257,7 @@
 
 
 class BigqueryTabledataListRequest(_messages.Message):
-  """A BigqueryTabledataListRequest object.
+  r"""A BigqueryTabledataListRequest object.
 
   Fields:
     datasetId: Dataset ID of the table to read
@@ -278,7 +278,7 @@
 
 
 class BigqueryTablesDeleteRequest(_messages.Message):
-  """A BigqueryTablesDeleteRequest object.
+  r"""A BigqueryTablesDeleteRequest object.
 
   Fields:
     datasetId: Dataset ID of the table to delete
@@ -292,11 +292,11 @@
 
 
 class BigqueryTablesDeleteResponse(_messages.Message):
-  """An empty BigqueryTablesDelete response."""
+  r"""An empty BigqueryTablesDelete response."""
 
 
 class BigqueryTablesGetRequest(_messages.Message):
-  """A BigqueryTablesGetRequest object.
+  r"""A BigqueryTablesGetRequest object.
 
   Fields:
     datasetId: Dataset ID of the requested table
@@ -310,7 +310,7 @@
 
 
 class BigqueryTablesInsertRequest(_messages.Message):
-  """A BigqueryTablesInsertRequest object.
+  r"""A BigqueryTablesInsertRequest object.
 
   Fields:
     datasetId: Dataset ID of the new table
@@ -324,7 +324,7 @@
 
 
 class BigqueryTablesListRequest(_messages.Message):
-  """A BigqueryTablesListRequest object.
+  r"""A BigqueryTablesListRequest object.
 
   Fields:
     datasetId: Dataset ID of the tables to list
@@ -341,7 +341,7 @@
 
 
 class BigqueryTablesPatchRequest(_messages.Message):
-  """A BigqueryTablesPatchRequest object.
+  r"""A BigqueryTablesPatchRequest object.
 
   Fields:
     datasetId: Dataset ID of the table to update
@@ -357,7 +357,7 @@
 
 
 class BigqueryTablesUpdateRequest(_messages.Message):
-  """A BigqueryTablesUpdateRequest object.
+  r"""A BigqueryTablesUpdateRequest object.
 
   Fields:
     datasetId: Dataset ID of the table to update
@@ -373,7 +373,7 @@
 
 
 class BigtableColumn(_messages.Message):
-  """A BigtableColumn object.
+  r"""A BigtableColumn object.
 
   Fields:
     encoding: [Optional] The encoding of the values when the type is not
@@ -417,7 +417,7 @@
 
 
 class BigtableColumnFamily(_messages.Message):
-  """A BigtableColumnFamily object.
+  r"""A BigtableColumnFamily object.
 
   Fields:
     columns: [Optional] Lists of columns that should be exposed as individual
@@ -451,7 +451,7 @@
 
 
 class BigtableOptions(_messages.Message):
-  """A BigtableOptions object.
+  r"""A BigtableOptions object.
 
   Fields:
     columnFamilies: [Optional] List of column families to expose in the table
@@ -478,7 +478,7 @@
 
 
 class CsvOptions(_messages.Message):
-  """A CsvOptions object.
+  r"""A CsvOptions object.
 
   Fields:
     allowJaggedRows: [Optional] Indicates if BigQuery should accept rows that
@@ -521,7 +521,7 @@
 
 
 class Dataset(_messages.Message):
-  """A Dataset object.
+  r"""A Dataset object.
 
   Messages:
     AccessValueListEntry: A AccessValueListEntry object.
@@ -582,7 +582,7 @@
   """
 
   class AccessValueListEntry(_messages.Message):
-    """A AccessValueListEntry object.
+    r"""A AccessValueListEntry object.
 
     Fields:
       domain: [Pick one] A domain to grant access to. Any users signed in with
@@ -616,7 +616,7 @@
 
   @encoding.MapUnrecognizedFields('additionalProperties')
   class LabelsValue(_messages.Message):
-    """[Experimental] The labels associated with this dataset. You can use
+    r"""[Experimental] The labels associated with this dataset. You can use
     these to organize and group your datasets. You can set this property when
     inserting or updating a dataset. Label keys and values can be no longer
     than 63 characters, can only contain letters, numeric characters,
@@ -633,7 +633,7 @@
     """
 
     class AdditionalProperty(_messages.Message):
-      """An additional property for a LabelsValue object.
+      r"""An additional property for a LabelsValue object.
 
       Fields:
         key: Name of the additional property.
@@ -661,7 +661,7 @@
 
 
 class DatasetList(_messages.Message):
-  """A DatasetList object.
+  r"""A DatasetList object.
 
   Messages:
     DatasetsValueListEntry: A DatasetsValueListEntry object.
@@ -680,7 +680,7 @@
   """
 
   class DatasetsValueListEntry(_messages.Message):
-    """A DatasetsValueListEntry object.
+    r"""A DatasetsValueListEntry object.
 
     Messages:
       LabelsValue: [Experimental] The labels associated with this dataset. You
@@ -699,7 +699,7 @@
 
     @encoding.MapUnrecognizedFields('additionalProperties')
     class LabelsValue(_messages.Message):
-      """[Experimental] The labels associated with this dataset. You can use
+      r"""[Experimental] The labels associated with this dataset. You can use
       these to organize and group your datasets.
 
       Messages:
@@ -710,7 +710,7 @@
       """
 
       class AdditionalProperty(_messages.Message):
-        """An additional property for a LabelsValue object.
+        r"""An additional property for a LabelsValue object.
 
         Fields:
           key: Name of the additional property.
@@ -735,7 +735,7 @@
 
 
 class DatasetReference(_messages.Message):
-  """A DatasetReference object.
+  r"""A DatasetReference object.
 
   Fields:
     datasetId: [Required] A unique ID for this dataset, without the project
@@ -749,7 +749,7 @@
 
 
 class ErrorProto(_messages.Message):
-  """A ErrorProto object.
+  r"""A ErrorProto object.
 
   Fields:
     debugInfo: Debugging information. This property is internal to Google and
@@ -766,7 +766,7 @@
 
 
 class ExplainQueryStage(_messages.Message):
-  """A ExplainQueryStage object.
+  r"""A ExplainQueryStage object.
 
   Fields:
     computeRatioAvg: Relative amount of time the average shard spent on CPU-
@@ -809,7 +809,7 @@
 
 
 class ExplainQueryStep(_messages.Message):
-  """A ExplainQueryStep object.
+  r"""A ExplainQueryStep object.
 
   Fields:
     kind: Machine-readable operation type.
@@ -821,7 +821,7 @@
 
 
 class ExternalDataConfiguration(_messages.Message):
-  """A ExternalDataConfiguration object.
+  r"""A ExternalDataConfiguration object.
 
   Fields:
     autodetect: [Experimental] Try to detect schema and format options
@@ -885,7 +885,7 @@
 
 
 class GetQueryResultsResponse(_messages.Message):
-  """A GetQueryResultsResponse object.
+  r"""A GetQueryResultsResponse object.
 
   Fields:
     cacheHit: Whether the query result was fetched from the query cache.
@@ -934,7 +934,7 @@
 
 
 class GoogleSheetsOptions(_messages.Message):
-  """A GoogleSheetsOptions object.
+  r"""A GoogleSheetsOptions object.
 
   Fields:
     skipLeadingRows: [Optional] The number of rows at the top of a sheet that
@@ -955,7 +955,7 @@
 
 
 class Job(_messages.Message):
-  """A Job object.
+  r"""A Job object.
 
   Fields:
     configuration: [Required] Describes the job configuration.
@@ -985,7 +985,7 @@
 
 
 class JobCancelResponse(_messages.Message):
-  """A JobCancelResponse object.
+  r"""A JobCancelResponse object.
 
   Fields:
     job: The final state of the job.
@@ -997,7 +997,7 @@
 
 
 class JobConfiguration(_messages.Message):
-  """A JobConfiguration object.
+  r"""A JobConfiguration object.
 
   Fields:
     copy: [Pick one] Copies a table.
@@ -1018,7 +1018,7 @@
 
 
 class JobConfigurationExtract(_messages.Message):
-  """A JobConfigurationExtract object.
+  r"""A JobConfigurationExtract object.
 
   Fields:
     compression: [Optional] The compression type to use for exported files.
@@ -1048,7 +1048,7 @@
 
 
 class JobConfigurationLoad(_messages.Message):
-  """A JobConfigurationLoad object.
+  r"""A JobConfigurationLoad object.
 
   Fields:
     allowJaggedRows: [Optional] Accept rows that are missing trailing optional
@@ -1167,7 +1167,7 @@
 
 
 class JobConfigurationQuery(_messages.Message):
-  """A JobConfigurationQuery object.
+  r"""A JobConfigurationQuery object.
 
   Messages:
     TableDefinitionsValue: [Optional] If querying an external data source
@@ -1247,7 +1247,7 @@
 
   @encoding.MapUnrecognizedFields('additionalProperties')
   class TableDefinitionsValue(_messages.Message):
-    """[Optional] If querying an external data source outside of BigQuery,
+    r"""[Optional] If querying an external data source outside of BigQuery,
     describes the data format, location and other properties of the data
     source. By defining these properties, the data source can then be queried
     as if it were a standard BigQuery table.
@@ -1262,7 +1262,7 @@
     """
 
     class AdditionalProperty(_messages.Message):
-      """An additional property for a TableDefinitionsValue object.
+      r"""An additional property for a TableDefinitionsValue object.
 
       Fields:
         key: Name of the additional property.
@@ -1293,7 +1293,7 @@
 
 
 class JobConfigurationTableCopy(_messages.Message):
-  """A JobConfigurationTableCopy object.
+  r"""A JobConfigurationTableCopy object.
 
   Fields:
     createDisposition: [Optional] Specifies whether the job is allowed to
@@ -1325,7 +1325,7 @@
 
 
 class JobList(_messages.Message):
-  """A JobList object.
+  r"""A JobList object.
 
   Messages:
     JobsValueListEntry: A JobsValueListEntry object.
@@ -1338,7 +1338,7 @@
   """
 
   class JobsValueListEntry(_messages.Message):
-    """A JobsValueListEntry object.
+    r"""A JobsValueListEntry object.
 
     Fields:
       configuration: [Full-projection-only] Specifies the job configuration.
@@ -1373,7 +1373,7 @@
 
 
 class JobReference(_messages.Message):
-  """A JobReference object.
+  r"""A JobReference object.
 
   Fields:
     jobId: [Required] The ID of the job. The ID must contain only letters
@@ -1387,7 +1387,7 @@
 
 
 class JobStatistics(_messages.Message):
-  """A JobStatistics object.
+  r"""A JobStatistics object.
 
   Fields:
     creationTime: [Output-only] Creation time of this job, in milliseconds
@@ -1414,7 +1414,7 @@
 
 
 class JobStatistics2(_messages.Message):
-  """A JobStatistics2 object.
+  r"""A JobStatistics2 object.
 
   Fields:
     billingTier: [Output-only] Billing tier for the job.
@@ -1445,7 +1445,7 @@
 
 
 class JobStatistics3(_messages.Message):
-  """A JobStatistics3 object.
+  r"""A JobStatistics3 object.
 
   Fields:
     inputFileBytes: [Output-only] Number of bytes of source data in a load
@@ -1464,7 +1464,7 @@
 
 
 class JobStatistics4(_messages.Message):
-  """A JobStatistics4 object.
+  r"""A JobStatistics4 object.
 
   Fields:
     destinationUriFileCounts: [Output-only] Number of files per destination
@@ -1477,7 +1477,7 @@
 
 
 class JobStatus(_messages.Message):
-  """A JobStatus object.
+  r"""A JobStatus object.
 
   Fields:
     errorResult: [Output-only] Final error result of the job. If present,
@@ -1495,7 +1495,7 @@
 
 @encoding.MapUnrecognizedFields('additionalProperties')
 class JsonObject(_messages.Message):
-  """Represents a single JSON object.
+  r"""Represents a single JSON object.
 
   Messages:
     AdditionalProperty: An additional property for a JsonObject object.
@@ -1505,7 +1505,7 @@
   """
 
   class AdditionalProperty(_messages.Message):
-    """An additional property for a JsonObject object.
+    r"""An additional property for a JsonObject object.
 
     Fields:
       key: Name of the additional property.
@@ -1522,7 +1522,7 @@
 
 
 class ProjectList(_messages.Message):
-  """A ProjectList object.
+  r"""A ProjectList object.
 
   Messages:
     ProjectsValueListEntry: A ProjectsValueListEntry object.
@@ -1536,7 +1536,7 @@
   """
 
   class ProjectsValueListEntry(_messages.Message):
-    """A ProjectsValueListEntry object.
+    r"""A ProjectsValueListEntry object.
 
     Fields:
       friendlyName: A descriptive name for this project.
@@ -1560,7 +1560,7 @@
 
 
 class ProjectReference(_messages.Message):
-  """A ProjectReference object.
+  r"""A ProjectReference object.
 
   Fields:
     projectId: [Required] ID of the project. Can be either the numeric ID or
@@ -1571,7 +1571,7 @@
 
 
 class QueryRequest(_messages.Message):
-  """A QueryRequest object.
+  r"""A QueryRequest object.
 
   Fields:
     defaultDataset: [Optional] Specifies the default datasetId and projectId
@@ -1624,7 +1624,7 @@
 
 
 class QueryResponse(_messages.Message):
-  """A QueryResponse object.
+  r"""A QueryResponse object.
 
   Fields:
     cacheHit: Whether the query result was fetched from the query cache.
@@ -1672,7 +1672,7 @@
 
 
 class StandardQueryParameters(_messages.Message):
-  """Query parameters accepted by all methods.
+  r"""Query parameters accepted by all methods.
 
   Enums:
     AltValueValuesEnum: Data format for the response.
@@ -1695,7 +1695,7 @@
   """
 
   class AltValueValuesEnum(_messages.Enum):
-    """Data format for the response.
+    r"""Data format for the response.
 
     Values:
       json: Responses with Content-Type of application/json
@@ -1713,7 +1713,7 @@
 
 
 class Streamingbuffer(_messages.Message):
-  """A Streamingbuffer object.
+  r"""A Streamingbuffer object.
 
   Fields:
     estimatedBytes: [Output-only] A lower-bound estimate of the number of
@@ -1731,7 +1731,7 @@
 
 
 class Table(_messages.Message):
-  """A Table object.
+  r"""A Table object.
 
   Fields:
     creationTime: [Output-only] The time when this table was created, in
@@ -1800,7 +1800,7 @@
 
 
 class TableCell(_messages.Message):
-  """A TableCell object.
+  r"""A TableCell object.
 
   Fields:
     v: A extra_types.JsonValue attribute.
@@ -1810,7 +1810,7 @@
 
 
 class TableDataInsertAllRequest(_messages.Message):
-  """A TableDataInsertAllRequest object.
+  r"""A TableDataInsertAllRequest object.
 
   Messages:
     RowsValueListEntry: A RowsValueListEntry object.
@@ -1833,7 +1833,7 @@
   """
 
   class RowsValueListEntry(_messages.Message):
-    """A RowsValueListEntry object.
+    r"""A RowsValueListEntry object.
 
     Fields:
       insertId: [Optional] A unique ID for each row. BigQuery uses this
@@ -1854,7 +1854,7 @@
 
 
 class TableDataInsertAllResponse(_messages.Message):
-  """A TableDataInsertAllResponse object.
+  r"""A TableDataInsertAllResponse object.
 
   Messages:
     InsertErrorsValueListEntry: A InsertErrorsValueListEntry object.
@@ -1865,7 +1865,7 @@
   """
 
   class InsertErrorsValueListEntry(_messages.Message):
-    """A InsertErrorsValueListEntry object.
+    r"""A InsertErrorsValueListEntry object.
 
     Fields:
       errors: Error information for the row indicated by the index property.
@@ -1880,7 +1880,7 @@
 
 
 class TableDataList(_messages.Message):
-  """A TableDataList object.
+  r"""A TableDataList object.
 
   Fields:
     etag: A hash of this page of results.
@@ -1900,7 +1900,7 @@
 
 
 class TableFieldSchema(_messages.Message):
-  """A TableFieldSchema object.
+  r"""A TableFieldSchema object.
 
   Fields:
     description: [Optional] The field description. The maximum length is 16K
@@ -1925,7 +1925,7 @@
 
 
 class TableList(_messages.Message):
-  """A TableList object.
+  r"""A TableList object.
 
   Messages:
     TablesValueListEntry: A TablesValueListEntry object.
@@ -1939,7 +1939,7 @@
   """
 
   class TablesValueListEntry(_messages.Message):
-    """A TablesValueListEntry object.
+    r"""A TablesValueListEntry object.
 
     Fields:
       friendlyName: The user-friendly name for this table.
@@ -1963,7 +1963,7 @@
 
 
 class TableReference(_messages.Message):
-  """A TableReference object.
+  r"""A TableReference object.
 
   Fields:
     datasetId: [Required] The ID of the dataset containing this table.
@@ -1979,7 +1979,7 @@
 
 
 class TableRow(_messages.Message):
-  """A TableRow object.
+  r"""A TableRow object.
 
   Fields:
     f: Represents a single row in the result set, consisting of one or more
@@ -1990,7 +1990,7 @@
 
 
 class TableSchema(_messages.Message):
-  """A TableSchema object.
+  r"""A TableSchema object.
 
   Fields:
     fields: Describes the fields in a table.
@@ -2000,7 +2000,7 @@
 
 
 class TimePartitioning(_messages.Message):
-  """A TimePartitioning object.
+  r"""A TimePartitioning object.
 
   Fields:
     expirationMs: [Optional] Number of milliseconds for which to keep the
@@ -2014,7 +2014,7 @@
 
 
 class UserDefinedFunctionResource(_messages.Message):
-  """A UserDefinedFunctionResource object.
+  r"""A UserDefinedFunctionResource object.
 
   Fields:
     inlineCode: [Pick one] An inline resource that contains code for a user-
@@ -2029,7 +2029,7 @@
 
 
 class ViewDefinition(_messages.Message):
-  """A ViewDefinition object.
+  r"""A ViewDefinition object.
 
   Fields:
     query: [Required] A query that BigQuery executes when the view is
diff --git a/samples/dns_sample/dns_v1/dns_v1.py b/samples/dns_sample/dns_v1/dns_v1.py
deleted file mode 100644
index 56e54f3..0000000
--- a/samples/dns_sample/dns_v1/dns_v1.py
+++ /dev/null
@@ -1,554 +0,0 @@
-#!/usr/bin/env python
-"""CLI for dns, version v1."""
-# NOTE: This file is autogenerated and should not be edited by hand.
-
-import code
-import os
-import platform
-import sys
-
-from apitools.base.protorpclite import message_types
-from apitools.base.protorpclite import messages
-
-from google.apputils import appcommands
-import gflags as flags
-
-import apitools.base.py as apitools_base
-from apitools.base.py import cli as apitools_base_cli
-import dns_v1_client as client_lib
-import dns_v1_messages as messages
-
-
-def _DeclareDnsFlags():
-  """Declare global flags in an idempotent way."""
-  if 'api_endpoint' in flags.FLAGS:
-    return
-  flags.DEFINE_string(
-      'api_endpoint',
-      u'https://www.googleapis.com/dns/v1/',
-      'URL of the API endpoint to use.',
-      short_name='dns_url')
-  flags.DEFINE_string(
-      'history_file',
-      u'~/.dns.v1.history',
-      'File with interactive shell history.')
-  flags.DEFINE_multistring(
-      'add_header', [],
-      'Additional http headers (as key=value strings). '
-      'Can be specified multiple times.')
-  flags.DEFINE_string(
-      'service_account_json_keyfile', '',
-      'Filename for a JSON service account key downloaded'
-      ' from the Developer Console.')
-  flags.DEFINE_enum(
-      'alt',
-      u'json',
-      [u'json'],
-      u'Data format for the response.')
-  flags.DEFINE_string(
-      'fields',
-      None,
-      u'Selector specifying which fields to include in a partial response.')
-  flags.DEFINE_string(
-      'key',
-      None,
-      u'API key. Your API key identifies your project and provides you with '
-      u'API access, quota, and reports. Required unless you provide an OAuth '
-      u'2.0 token.')
-  flags.DEFINE_string(
-      'oauth_token',
-      None,
-      u'OAuth 2.0 token for the current user.')
-  flags.DEFINE_boolean(
-      'prettyPrint',
-      'True',
-      u'Returns response with indentations and line breaks.')
-  flags.DEFINE_string(
-      'quotaUser',
-      None,
-      u'Available to use for quota purposes for server-side applications. Can'
-      u' be any arbitrary string assigned to a user, but should not exceed 40'
-      u' characters. Overrides userIp if both are provided.')
-  flags.DEFINE_string(
-      'trace',
-      None,
-      'A tracing token of the form "token:<tokenid>" to include in api '
-      'requests.')
-  flags.DEFINE_string(
-      'userIp',
-      None,
-      u'IP address of the site where the request originates. Use this if you '
-      u'want to enforce per-user limits.')
-
-
-FLAGS = flags.FLAGS
-apitools_base_cli.DeclareBaseFlags()
-_DeclareDnsFlags()
-
-
-def GetGlobalParamsFromFlags():
-  """Return a StandardQueryParameters based on flags."""
-  result = messages.StandardQueryParameters()
-  if FLAGS['alt'].present:
-    result.alt = messages.StandardQueryParameters.AltValueValuesEnum(FLAGS.alt)
-  if FLAGS['fields'].present:
-    result.fields = FLAGS.fields.decode('utf8')
-  if FLAGS['key'].present:
-    result.key = FLAGS.key.decode('utf8')
-  if FLAGS['oauth_token'].present:
-    result.oauth_token = FLAGS.oauth_token.decode('utf8')
-  if FLAGS['prettyPrint'].present:
-    result.prettyPrint = FLAGS.prettyPrint
-  if FLAGS['quotaUser'].present:
-    result.quotaUser = FLAGS.quotaUser.decode('utf8')
-  if FLAGS['trace'].present:
-    result.trace = FLAGS.trace.decode('utf8')
-  if FLAGS['userIp'].present:
-    result.userIp = FLAGS.userIp.decode('utf8')
-  return result
-
-
-def GetClientFromFlags():
-  """Return a client object, configured from flags."""
-  log_request = FLAGS.log_request or FLAGS.log_request_response
-  log_response = FLAGS.log_response or FLAGS.log_request_response
-  api_endpoint = apitools_base.NormalizeApiEndpoint(FLAGS.api_endpoint)
-  additional_http_headers = dict(x.split('=', 1) for x in FLAGS.add_header)
-  credentials_args = {
-      'service_account_json_keyfile': os.path.expanduser(FLAGS.service_account_json_keyfile)
-  }
-  try:
-    client = client_lib.DnsV1(
-        api_endpoint, log_request=log_request,
-        log_response=log_response,
-        credentials_args=credentials_args,
-        additional_http_headers=additional_http_headers)
-  except apitools_base.CredentialsError as e:
-    print 'Error creating credentials: %s' % e
-    sys.exit(1)
-  return client
-
-
-class PyShell(appcommands.Cmd):
-
-  def Run(self, _):
-    """Run an interactive python shell with the client."""
-    client = GetClientFromFlags()
-    params = GetGlobalParamsFromFlags()
-    for field in params.all_fields():
-      value = params.get_assigned_value(field.name)
-      if value != field.default:
-        client.AddGlobalParam(field.name, value)
-    banner = """
-           == dns interactive console ==
-                 client: a dns client
-          apitools_base: base apitools module
-         messages: the generated messages module
-    """
-    local_vars = {
-        'apitools_base': apitools_base,
-        'client': client,
-        'client_lib': client_lib,
-        'messages': messages,
-    }
-    if platform.system() == 'Linux':
-      console = apitools_base_cli.ConsoleWithReadline(
-          local_vars, histfile=FLAGS.history_file)
-    else:
-      console = code.InteractiveConsole(local_vars)
-    try:
-      console.interact(banner)
-    except SystemExit as e:
-      return e.code
-
-
-class ChangesCreate(apitools_base_cli.NewCmd):
-  """Command wrapping changes.Create."""
-
-  usage = """changes_create <project> <managedZone>"""
-
-  def __init__(self, name, fv):
-    super(ChangesCreate, self).__init__(name, fv)
-    flags.DEFINE_string(
-        'change',
-        None,
-        u'A Change resource to be passed as the request body.',
-        flag_values=fv)
-
-  def RunWithArgs(self, project, managedZone):
-    """Atomically update the ResourceRecordSet collection.
-
-    Args:
-      project: Identifies the project addressed by this request.
-      managedZone: Identifies the managed zone addressed by this request. Can
-        be the managed zone name or id.
-
-    Flags:
-      change: A Change resource to be passed as the request body.
-    """
-    client = GetClientFromFlags()
-    global_params = GetGlobalParamsFromFlags()
-    request = messages.DnsChangesCreateRequest(
-        project=project.decode('utf8'),
-        managedZone=managedZone.decode('utf8'),
-        )
-    if FLAGS['change'].present:
-      request.change = apitools_base.JsonToMessage(messages.Change, FLAGS.change)
-    result = client.changes.Create(
-        request, global_params=global_params)
-    print apitools_base_cli.FormatOutput(result)
-
-
-class ChangesGet(apitools_base_cli.NewCmd):
-  """Command wrapping changes.Get."""
-
-  usage = """changes_get <project> <managedZone> <changeId>"""
-
-  def __init__(self, name, fv):
-    super(ChangesGet, self).__init__(name, fv)
-
-  def RunWithArgs(self, project, managedZone, changeId):
-    """Fetch the representation of an existing Change.
-
-    Args:
-      project: Identifies the project addressed by this request.
-      managedZone: Identifies the managed zone addressed by this request. Can
-        be the managed zone name or id.
-      changeId: The identifier of the requested change, from a previous
-        ResourceRecordSetsChangeResponse.
-    """
-    client = GetClientFromFlags()
-    global_params = GetGlobalParamsFromFlags()
-    request = messages.DnsChangesGetRequest(
-        project=project.decode('utf8'),
-        managedZone=managedZone.decode('utf8'),
-        changeId=changeId.decode('utf8'),
-        )
-    result = client.changes.Get(
-        request, global_params=global_params)
-    print apitools_base_cli.FormatOutput(result)
-
-
-class ChangesList(apitools_base_cli.NewCmd):
-  """Command wrapping changes.List."""
-
-  usage = """changes_list <project> <managedZone>"""
-
-  def __init__(self, name, fv):
-    super(ChangesList, self).__init__(name, fv)
-    flags.DEFINE_integer(
-        'maxResults',
-        None,
-        u'Optional. Maximum number of results to be returned. If unspecified,'
-        u' the server will decide how many results to return.',
-        flag_values=fv)
-    flags.DEFINE_string(
-        'pageToken',
-        None,
-        u'Optional. A tag returned by a previous list request that was '
-        u'truncated. Use this parameter to continue a previous list request.',
-        flag_values=fv)
-    flags.DEFINE_enum(
-        'sortBy',
-        u'changeSequence',
-        [u'changeSequence'],
-        u'Sorting criterion. The only supported value is change sequence.',
-        flag_values=fv)
-    flags.DEFINE_string(
-        'sortOrder',
-        None,
-        u"Sorting order direction: 'ascending' or 'descending'.",
-        flag_values=fv)
-
-  def RunWithArgs(self, project, managedZone):
-    """Enumerate Changes to a ResourceRecordSet collection.
-
-    Args:
-      project: Identifies the project addressed by this request.
-      managedZone: Identifies the managed zone addressed by this request. Can
-        be the managed zone name or id.
-
-    Flags:
-      maxResults: Optional. Maximum number of results to be returned. If
-        unspecified, the server will decide how many results to return.
-      pageToken: Optional. A tag returned by a previous list request that was
-        truncated. Use this parameter to continue a previous list request.
-      sortBy: Sorting criterion. The only supported value is change sequence.
-      sortOrder: Sorting order direction: 'ascending' or 'descending'.
-    """
-    client = GetClientFromFlags()
-    global_params = GetGlobalParamsFromFlags()
-    request = messages.DnsChangesListRequest(
-        project=project.decode('utf8'),
-        managedZone=managedZone.decode('utf8'),
-        )
-    if FLAGS['maxResults'].present:
-      request.maxResults = FLAGS.maxResults
-    if FLAGS['pageToken'].present:
-      request.pageToken = FLAGS.pageToken.decode('utf8')
-    if FLAGS['sortBy'].present:
-      request.sortBy = messages.DnsChangesListRequest.SortByValueValuesEnum(FLAGS.sortBy)
-    if FLAGS['sortOrder'].present:
-      request.sortOrder = FLAGS.sortOrder.decode('utf8')
-    result = client.changes.List(
-        request, global_params=global_params)
-    print apitools_base_cli.FormatOutput(result)
-
-
-class ManagedZonesCreate(apitools_base_cli.NewCmd):
-  """Command wrapping managedZones.Create."""
-
-  usage = """managedZones_create <project>"""
-
-  def __init__(self, name, fv):
-    super(ManagedZonesCreate, self).__init__(name, fv)
-    flags.DEFINE_string(
-        'managedZone',
-        None,
-        u'A ManagedZone resource to be passed as the request body.',
-        flag_values=fv)
-
-  def RunWithArgs(self, project):
-    """Create a new ManagedZone.
-
-    Args:
-      project: Identifies the project addressed by this request.
-
-    Flags:
-      managedZone: A ManagedZone resource to be passed as the request body.
-    """
-    client = GetClientFromFlags()
-    global_params = GetGlobalParamsFromFlags()
-    request = messages.DnsManagedZonesCreateRequest(
-        project=project.decode('utf8'),
-        )
-    if FLAGS['managedZone'].present:
-      request.managedZone = apitools_base.JsonToMessage(messages.ManagedZone, FLAGS.managedZone)
-    result = client.managedZones.Create(
-        request, global_params=global_params)
-    print apitools_base_cli.FormatOutput(result)
-
-
-class ManagedZonesDelete(apitools_base_cli.NewCmd):
-  """Command wrapping managedZones.Delete."""
-
-  usage = """managedZones_delete <project> <managedZone>"""
-
-  def __init__(self, name, fv):
-    super(ManagedZonesDelete, self).__init__(name, fv)
-
-  def RunWithArgs(self, project, managedZone):
-    """Delete a previously created ManagedZone.
-
-    Args:
-      project: Identifies the project addressed by this request.
-      managedZone: Identifies the managed zone addressed by this request. Can
-        be the managed zone name or id.
-    """
-    client = GetClientFromFlags()
-    global_params = GetGlobalParamsFromFlags()
-    request = messages.DnsManagedZonesDeleteRequest(
-        project=project.decode('utf8'),
-        managedZone=managedZone.decode('utf8'),
-        )
-    result = client.managedZones.Delete(
-        request, global_params=global_params)
-    print apitools_base_cli.FormatOutput(result)
-
-
-class ManagedZonesGet(apitools_base_cli.NewCmd):
-  """Command wrapping managedZones.Get."""
-
-  usage = """managedZones_get <project> <managedZone>"""
-
-  def __init__(self, name, fv):
-    super(ManagedZonesGet, self).__init__(name, fv)
-
-  def RunWithArgs(self, project, managedZone):
-    """Fetch the representation of an existing ManagedZone.
-
-    Args:
-      project: Identifies the project addressed by this request.
-      managedZone: Identifies the managed zone addressed by this request. Can
-        be the managed zone name or id.
-    """
-    client = GetClientFromFlags()
-    global_params = GetGlobalParamsFromFlags()
-    request = messages.DnsManagedZonesGetRequest(
-        project=project.decode('utf8'),
-        managedZone=managedZone.decode('utf8'),
-        )
-    result = client.managedZones.Get(
-        request, global_params=global_params)
-    print apitools_base_cli.FormatOutput(result)
-
-
-class ManagedZonesList(apitools_base_cli.NewCmd):
-  """Command wrapping managedZones.List."""
-
-  usage = """managedZones_list <project>"""
-
-  def __init__(self, name, fv):
-    super(ManagedZonesList, self).__init__(name, fv)
-    flags.DEFINE_string(
-        'dnsName',
-        None,
-        u'Restricts the list to return only zones with this domain name.',
-        flag_values=fv)
-    flags.DEFINE_integer(
-        'maxResults',
-        None,
-        u'Optional. Maximum number of results to be returned. If unspecified,'
-        u' the server will decide how many results to return.',
-        flag_values=fv)
-    flags.DEFINE_string(
-        'pageToken',
-        None,
-        u'Optional. A tag returned by a previous list request that was '
-        u'truncated. Use this parameter to continue a previous list request.',
-        flag_values=fv)
-
-  def RunWithArgs(self, project):
-    """Enumerate ManagedZones that have been created but not yet deleted.
-
-    Args:
-      project: Identifies the project addressed by this request.
-
-    Flags:
-      dnsName: Restricts the list to return only zones with this domain name.
-      maxResults: Optional. Maximum number of results to be returned. If
-        unspecified, the server will decide how many results to return.
-      pageToken: Optional. A tag returned by a previous list request that was
-        truncated. Use this parameter to continue a previous list request.
-    """
-    client = GetClientFromFlags()
-    global_params = GetGlobalParamsFromFlags()
-    request = messages.DnsManagedZonesListRequest(
-        project=project.decode('utf8'),
-        )
-    if FLAGS['dnsName'].present:
-      request.dnsName = FLAGS.dnsName.decode('utf8')
-    if FLAGS['maxResults'].present:
-      request.maxResults = FLAGS.maxResults
-    if FLAGS['pageToken'].present:
-      request.pageToken = FLAGS.pageToken.decode('utf8')
-    result = client.managedZones.List(
-        request, global_params=global_params)
-    print apitools_base_cli.FormatOutput(result)
-
-
-class ProjectsGet(apitools_base_cli.NewCmd):
-  """Command wrapping projects.Get."""
-
-  usage = """projects_get <project>"""
-
-  def __init__(self, name, fv):
-    super(ProjectsGet, self).__init__(name, fv)
-
-  def RunWithArgs(self, project):
-    """Fetch the representation of an existing Project.
-
-    Args:
-      project: Identifies the project addressed by this request.
-    """
-    client = GetClientFromFlags()
-    global_params = GetGlobalParamsFromFlags()
-    request = messages.DnsProjectsGetRequest(
-        project=project.decode('utf8'),
-        )
-    result = client.projects.Get(
-        request, global_params=global_params)
-    print apitools_base_cli.FormatOutput(result)
-
-
-class ResourceRecordSetsList(apitools_base_cli.NewCmd):
-  """Command wrapping resourceRecordSets.List."""
-
-  usage = """resourceRecordSets_list <project> <managedZone>"""
-
-  def __init__(self, name, fv):
-    super(ResourceRecordSetsList, self).__init__(name, fv)
-    flags.DEFINE_integer(
-        'maxResults',
-        None,
-        u'Optional. Maximum number of results to be returned. If unspecified,'
-        u' the server will decide how many results to return.',
-        flag_values=fv)
-    flags.DEFINE_string(
-        'name',
-        None,
-        u'Restricts the list to return only records with this fully qualified'
-        u' domain name.',
-        flag_values=fv)
-    flags.DEFINE_string(
-        'pageToken',
-        None,
-        u'Optional. A tag returned by a previous list request that was '
-        u'truncated. Use this parameter to continue a previous list request.',
-        flag_values=fv)
-    flags.DEFINE_string(
-        'type',
-        None,
-        u'Restricts the list to return only records of this type. If present,'
-        u' the "name" parameter must also be present.',
-        flag_values=fv)
-
-  def RunWithArgs(self, project, managedZone):
-    """Enumerate ResourceRecordSets that have been created but not yet
-    deleted.
-
-    Args:
-      project: Identifies the project addressed by this request.
-      managedZone: Identifies the managed zone addressed by this request. Can
-        be the managed zone name or id.
-
-    Flags:
-      maxResults: Optional. Maximum number of results to be returned. If
-        unspecified, the server will decide how many results to return.
-      name: Restricts the list to return only records with this fully
-        qualified domain name.
-      pageToken: Optional. A tag returned by a previous list request that was
-        truncated. Use this parameter to continue a previous list request.
-      type: Restricts the list to return only records of this type. If
-        present, the "name" parameter must also be present.
-    """
-    client = GetClientFromFlags()
-    global_params = GetGlobalParamsFromFlags()
-    request = messages.DnsResourceRecordSetsListRequest(
-        project=project.decode('utf8'),
-        managedZone=managedZone.decode('utf8'),
-        )
-    if FLAGS['maxResults'].present:
-      request.maxResults = FLAGS.maxResults
-    if FLAGS['name'].present:
-      request.name = FLAGS.name.decode('utf8')
-    if FLAGS['pageToken'].present:
-      request.pageToken = FLAGS.pageToken.decode('utf8')
-    if FLAGS['type'].present:
-      request.type = FLAGS.type.decode('utf8')
-    result = client.resourceRecordSets.List(
-        request, global_params=global_params)
-    print apitools_base_cli.FormatOutput(result)
-
-
-def main(_):
-  appcommands.AddCmd('pyshell', PyShell)
-  appcommands.AddCmd('changes_create', ChangesCreate)
-  appcommands.AddCmd('changes_get', ChangesGet)
-  appcommands.AddCmd('changes_list', ChangesList)
-  appcommands.AddCmd('managedZones_create', ManagedZonesCreate)
-  appcommands.AddCmd('managedZones_delete', ManagedZonesDelete)
-  appcommands.AddCmd('managedZones_get', ManagedZonesGet)
-  appcommands.AddCmd('managedZones_list', ManagedZonesList)
-  appcommands.AddCmd('projects_get', ProjectsGet)
-  appcommands.AddCmd('resourceRecordSets_list', ResourceRecordSetsList)
-
-  apitools_base_cli.SetupLogger()
-  if hasattr(appcommands, 'SetDefaultCommand'):
-    appcommands.SetDefaultCommand('pyshell')
-
-
-run_main = apitools_base_cli.run_main
-
-if __name__ == '__main__':
-  appcommands.Run()
diff --git a/samples/dns_sample/dns_v1/dns_v1_client.py b/samples/dns_sample/dns_v1/dns_v1_client.py
index b15403c..ce3aff6 100644
--- a/samples/dns_sample/dns_v1/dns_v1_client.py
+++ b/samples/dns_sample/dns_v1/dns_v1_client.py
@@ -24,7 +24,7 @@
                get_credentials=True, http=None, model=None,
                log_request=False, log_response=False,
                credentials_args=None, default_global_params=None,
-               additional_http_headers=None):
+               additional_http_headers=None, response_encoding=None):
     """Create a new dns handle."""
     url = url or self.BASE_URL
     super(DnsV1, self).__init__(
@@ -33,7 +33,8 @@
         log_request=log_request, log_response=log_response,
         credentials_args=credentials_args,
         default_global_params=default_global_params,
-        additional_http_headers=additional_http_headers)
+        additional_http_headers=additional_http_headers,
+        response_encoding=response_encoding)
     self.changes = self.ChangesService(self)
     self.managedZones = self.ManagedZonesService(self)
     self.projects = self.ProjectsService(self)
@@ -50,7 +51,7 @@
           }
 
     def Create(self, request, global_params=None):
-      """Atomically update the ResourceRecordSet collection.
+      r"""Atomically update the ResourceRecordSet collection.
 
       Args:
         request: (DnsChangesCreateRequest) input message
@@ -76,7 +77,7 @@
     )
 
     def Get(self, request, global_params=None):
-      """Fetch the representation of an existing Change.
+      r"""Fetch the representation of an existing Change.
 
       Args:
         request: (DnsChangesGetRequest) input message
@@ -102,7 +103,7 @@
     )
 
     def List(self, request, global_params=None):
-      """Enumerate Changes to a ResourceRecordSet collection.
+      r"""Enumerate Changes to a ResourceRecordSet collection.
 
       Args:
         request: (DnsChangesListRequest) input message
@@ -138,7 +139,7 @@
           }
 
     def Create(self, request, global_params=None):
-      """Create a new ManagedZone.
+      r"""Create a new ManagedZone.
 
       Args:
         request: (DnsManagedZonesCreateRequest) input message
@@ -164,7 +165,7 @@
     )
 
     def Delete(self, request, global_params=None):
-      """Delete a previously created ManagedZone.
+      r"""Delete a previously created ManagedZone.
 
       Args:
         request: (DnsManagedZonesDeleteRequest) input message
@@ -190,7 +191,7 @@
     )
 
     def Get(self, request, global_params=None):
-      """Fetch the representation of an existing ManagedZone.
+      r"""Fetch the representation of an existing ManagedZone.
 
       Args:
         request: (DnsManagedZonesGetRequest) input message
@@ -216,7 +217,7 @@
     )
 
     def List(self, request, global_params=None):
-      """Enumerate ManagedZones that have been created but not yet deleted.
+      r"""Enumerate ManagedZones that have been created but not yet deleted.
 
       Args:
         request: (DnsManagedZonesListRequest) input message
@@ -252,7 +253,7 @@
           }
 
     def Get(self, request, global_params=None):
-      """Fetch the representation of an existing Project.
+      r"""Fetch the representation of an existing Project.
 
       Args:
         request: (DnsProjectsGetRequest) input message
@@ -288,7 +289,7 @@
           }
 
     def List(self, request, global_params=None):
-      """Enumerate ResourceRecordSets that have been created but not yet deleted.
+      r"""Enumerate ResourceRecordSets that have been created but not yet deleted.
 
       Args:
         request: (DnsResourceRecordSetsListRequest) input message
diff --git a/samples/dns_sample/dns_v1/dns_v1_messages.py b/samples/dns_sample/dns_v1/dns_v1_messages.py
index ef474c4..5d5b77e 100644
--- a/samples/dns_sample/dns_v1/dns_v1_messages.py
+++ b/samples/dns_sample/dns_v1/dns_v1_messages.py
@@ -12,7 +12,7 @@
 
 
 class Change(_messages.Message):
-  """An atomic update to a collection of ResourceRecordSets.
+  r"""An atomic update to a collection of ResourceRecordSets.
 
   Enums:
     StatusValueValuesEnum: Status of the operation (output only).
@@ -31,7 +31,7 @@
   """
 
   class StatusValueValuesEnum(_messages.Enum):
-    """Status of the operation (output only).
+    r"""Status of the operation (output only).
 
     Values:
       done: <no description>
@@ -49,7 +49,7 @@
 
 
 class ChangesListResponse(_messages.Message):
-  """The response to a request to enumerate Changes to a ResourceRecordSets
+  r"""The response to a request to enumerate Changes to a ResourceRecordSets
   collection.
 
   Fields:
@@ -72,7 +72,7 @@
 
 
 class DnsChangesCreateRequest(_messages.Message):
-  """A DnsChangesCreateRequest object.
+  r"""A DnsChangesCreateRequest object.
 
   Fields:
     change: A Change resource to be passed as the request body.
@@ -87,7 +87,7 @@
 
 
 class DnsChangesGetRequest(_messages.Message):
-  """A DnsChangesGetRequest object.
+  r"""A DnsChangesGetRequest object.
 
   Fields:
     changeId: The identifier of the requested change, from a previous
@@ -103,7 +103,7 @@
 
 
 class DnsChangesListRequest(_messages.Message):
-  """A DnsChangesListRequest object.
+  r"""A DnsChangesListRequest object.
 
   Enums:
     SortByValueValuesEnum: Sorting criterion. The only supported value is
@@ -122,7 +122,7 @@
   """
 
   class SortByValueValuesEnum(_messages.Enum):
-    """Sorting criterion. The only supported value is change sequence.
+    r"""Sorting criterion. The only supported value is change sequence.
 
     Values:
       changeSequence: <no description>
@@ -138,7 +138,7 @@
 
 
 class DnsManagedZonesCreateRequest(_messages.Message):
-  """A DnsManagedZonesCreateRequest object.
+  r"""A DnsManagedZonesCreateRequest object.
 
   Fields:
     managedZone: A ManagedZone resource to be passed as the request body.
@@ -150,7 +150,7 @@
 
 
 class DnsManagedZonesDeleteRequest(_messages.Message):
-  """A DnsManagedZonesDeleteRequest object.
+  r"""A DnsManagedZonesDeleteRequest object.
 
   Fields:
     managedZone: Identifies the managed zone addressed by this request. Can be
@@ -163,11 +163,11 @@
 
 
 class DnsManagedZonesDeleteResponse(_messages.Message):
-  """An empty DnsManagedZonesDelete response."""
+  r"""An empty DnsManagedZonesDelete response."""
 
 
 class DnsManagedZonesGetRequest(_messages.Message):
-  """A DnsManagedZonesGetRequest object.
+  r"""A DnsManagedZonesGetRequest object.
 
   Fields:
     managedZone: Identifies the managed zone addressed by this request. Can be
@@ -180,7 +180,7 @@
 
 
 class DnsManagedZonesListRequest(_messages.Message):
-  """A DnsManagedZonesListRequest object.
+  r"""A DnsManagedZonesListRequest object.
 
   Fields:
     dnsName: Restricts the list to return only zones with this domain name.
@@ -198,7 +198,7 @@
 
 
 class DnsProjectsGetRequest(_messages.Message):
-  """A DnsProjectsGetRequest object.
+  r"""A DnsProjectsGetRequest object.
 
   Fields:
     project: Identifies the project addressed by this request.
@@ -208,7 +208,7 @@
 
 
 class DnsResourceRecordSetsListRequest(_messages.Message):
-  """A DnsResourceRecordSetsListRequest object.
+  r"""A DnsResourceRecordSetsListRequest object.
 
   Fields:
     managedZone: Identifies the managed zone addressed by this request. Can be
@@ -233,7 +233,7 @@
 
 
 class ManagedZone(_messages.Message):
-  """A zone is a subtree of the DNS namespace under one administrative
+  r"""A zone is a subtree of the DNS namespace under one administrative
   responsibility. A ManagedZone is a resource that represents a DNS zone
   hosted by the Cloud DNS service.
 
@@ -270,7 +270,7 @@
 
 
 class ManagedZonesListResponse(_messages.Message):
-  """A ManagedZonesListResponse object.
+  r"""A ManagedZonesListResponse object.
 
   Fields:
     kind: Type of resource.
@@ -292,7 +292,7 @@
 
 
 class Project(_messages.Message):
-  """A project resource. The project is a top level container for resources
+  r"""A project resource. The project is a top level container for resources
   including Cloud DNS ManagedZones. Projects can be created only in the APIs
   console.
 
@@ -312,7 +312,7 @@
 
 
 class Quota(_messages.Message):
-  """Limits associated with a Project.
+  r"""Limits associated with a Project.
 
   Fields:
     kind: Identifies what kind of resource this is. Value: the fixed string
@@ -340,7 +340,7 @@
 
 
 class ResourceRecordSet(_messages.Message):
-  """A unit of data that will be returned by the DNS servers.
+  r"""A unit of data that will be returned by the DNS servers.
 
   Fields:
     kind: Identifies what kind of resource this is. Value: the fixed string
@@ -361,7 +361,7 @@
 
 
 class ResourceRecordSetsListResponse(_messages.Message):
-  """A ResourceRecordSetsListResponse object.
+  r"""A ResourceRecordSetsListResponse object.
 
   Fields:
     kind: Type of resource.
@@ -383,7 +383,7 @@
 
 
 class StandardQueryParameters(_messages.Message):
-  """Query parameters accepted by all methods.
+  r"""Query parameters accepted by all methods.
 
   Enums:
     AltValueValuesEnum: Data format for the response.
@@ -406,7 +406,7 @@
   """
 
   class AltValueValuesEnum(_messages.Enum):
-    """Data format for the response.
+    r"""Data format for the response.
 
     Values:
       json: Responses with Content-Type of application/json
diff --git a/samples/fusiontables_sample/fusiontables_v1/fusiontables_v1.py b/samples/fusiontables_sample/fusiontables_v1/fusiontables_v1.py
deleted file mode 100644
index 8bb2c8a..0000000
--- a/samples/fusiontables_sample/fusiontables_v1/fusiontables_v1.py
+++ /dev/null
@@ -1,1797 +0,0 @@
-#!/usr/bin/env python
-"""CLI for fusiontables, version v1."""
-# NOTE: This file is autogenerated and should not be edited by hand.
-
-import code
-import os
-import platform
-import sys
-
-from apitools.base.protorpclite import message_types
-from apitools.base.protorpclite import messages
-
-from google.apputils import appcommands
-import gflags as flags
-
-import apitools.base.py as apitools_base
-from apitools.base.py import cli as apitools_base_cli
-import fusiontables_v1_client as client_lib
-import fusiontables_v1_messages as messages
-
-
-def _DeclareFusiontablesFlags():
-  """Declare global flags in an idempotent way."""
-  if 'api_endpoint' in flags.FLAGS:
-    return
-  flags.DEFINE_string(
-      'api_endpoint',
-      u'https://www.googleapis.com/fusiontables/v1/',
-      'URL of the API endpoint to use.',
-      short_name='fusiontables_url')
-  flags.DEFINE_string(
-      'history_file',
-      u'~/.fusiontables.v1.history',
-      'File with interactive shell history.')
-  flags.DEFINE_multistring(
-      'add_header', [],
-      'Additional http headers (as key=value strings). '
-      'Can be specified multiple times.')
-  flags.DEFINE_string(
-      'service_account_json_keyfile', '',
-      'Filename for a JSON service account key downloaded'
-      ' from the Developer Console.')
-  flags.DEFINE_enum(
-      'alt',
-      u'json',
-      [u'csv', u'json'],
-      u'Data format for the response.')
-  flags.DEFINE_string(
-      'fields',
-      None,
-      u'Selector specifying which fields to include in a partial response.')
-  flags.DEFINE_string(
-      'key',
-      None,
-      u'API key. Your API key identifies your project and provides you with '
-      u'API access, quota, and reports. Required unless you provide an OAuth '
-      u'2.0 token.')
-  flags.DEFINE_string(
-      'oauth_token',
-      None,
-      u'OAuth 2.0 token for the current user.')
-  flags.DEFINE_boolean(
-      'prettyPrint',
-      'True',
-      u'Returns response with indentations and line breaks.')
-  flags.DEFINE_string(
-      'quotaUser',
-      None,
-      u'Available to use for quota purposes for server-side applications. Can'
-      u' be any arbitrary string assigned to a user, but should not exceed 40'
-      u' characters. Overrides userIp if both are provided.')
-  flags.DEFINE_string(
-      'trace',
-      None,
-      'A tracing token of the form "token:<tokenid>" to include in api '
-      'requests.')
-  flags.DEFINE_string(
-      'userIp',
-      None,
-      u'IP address of the site where the request originates. Use this if you '
-      u'want to enforce per-user limits.')
-
-
-FLAGS = flags.FLAGS
-apitools_base_cli.DeclareBaseFlags()
-_DeclareFusiontablesFlags()
-
-
-def GetGlobalParamsFromFlags():
-  """Return a StandardQueryParameters based on flags."""
-  result = messages.StandardQueryParameters()
-  if FLAGS['alt'].present:
-    result.alt = messages.StandardQueryParameters.AltValueValuesEnum(FLAGS.alt)
-  if FLAGS['fields'].present:
-    result.fields = FLAGS.fields.decode('utf8')
-  if FLAGS['key'].present:
-    result.key = FLAGS.key.decode('utf8')
-  if FLAGS['oauth_token'].present:
-    result.oauth_token = FLAGS.oauth_token.decode('utf8')
-  if FLAGS['prettyPrint'].present:
-    result.prettyPrint = FLAGS.prettyPrint
-  if FLAGS['quotaUser'].present:
-    result.quotaUser = FLAGS.quotaUser.decode('utf8')
-  if FLAGS['trace'].present:
-    result.trace = FLAGS.trace.decode('utf8')
-  if FLAGS['userIp'].present:
-    result.userIp = FLAGS.userIp.decode('utf8')
-  return result
-
-
-def GetClientFromFlags():
-  """Return a client object, configured from flags."""
-  log_request = FLAGS.log_request or FLAGS.log_request_response
-  log_response = FLAGS.log_response or FLAGS.log_request_response
-  api_endpoint = apitools_base.NormalizeApiEndpoint(FLAGS.api_endpoint)
-  additional_http_headers = dict(x.split('=', 1) for x in FLAGS.add_header)
-  credentials_args = {
-      'service_account_json_keyfile': os.path.expanduser(FLAGS.service_account_json_keyfile)
-  }
-  try:
-    client = client_lib.FusiontablesV1(
-        api_endpoint, log_request=log_request,
-        log_response=log_response,
-        credentials_args=credentials_args,
-        additional_http_headers=additional_http_headers)
-  except apitools_base.CredentialsError as e:
-    print 'Error creating credentials: %s' % e
-    sys.exit(1)
-  return client
-
-
-class PyShell(appcommands.Cmd):
-
-  def Run(self, _):
-    """Run an interactive python shell with the client."""
-    client = GetClientFromFlags()
-    params = GetGlobalParamsFromFlags()
-    for field in params.all_fields():
-      value = params.get_assigned_value(field.name)
-      if value != field.default:
-        client.AddGlobalParam(field.name, value)
-    banner = """
-           == fusiontables interactive console ==
-                 client: a fusiontables client
-          apitools_base: base apitools module
-         messages: the generated messages module
-    """
-    local_vars = {
-        'apitools_base': apitools_base,
-        'client': client,
-        'client_lib': client_lib,
-        'messages': messages,
-    }
-    if platform.system() == 'Linux':
-      console = apitools_base_cli.ConsoleWithReadline(
-          local_vars, histfile=FLAGS.history_file)
-    else:
-      console = code.InteractiveConsole(local_vars)
-    try:
-      console.interact(banner)
-    except SystemExit as e:
-      return e.code
-
-
-class ColumnDelete(apitools_base_cli.NewCmd):
-  """Command wrapping column.Delete."""
-
-  usage = """column_delete <tableId> <columnId>"""
-
-  def __init__(self, name, fv):
-    super(ColumnDelete, self).__init__(name, fv)
-
-  def RunWithArgs(self, tableId, columnId):
-    """Deletes the column.
-
-    Args:
-      tableId: Table from which the column is being deleted.
-      columnId: Name or identifier for the column being deleted.
-    """
-    client = GetClientFromFlags()
-    global_params = GetGlobalParamsFromFlags()
-    request = messages.FusiontablesColumnDeleteRequest(
-        tableId=tableId.decode('utf8'),
-        columnId=columnId.decode('utf8'),
-        )
-    result = client.column.Delete(
-        request, global_params=global_params)
-    print apitools_base_cli.FormatOutput(result)
-
-
-class ColumnGet(apitools_base_cli.NewCmd):
-  """Command wrapping column.Get."""
-
-  usage = """column_get <tableId> <columnId>"""
-
-  def __init__(self, name, fv):
-    super(ColumnGet, self).__init__(name, fv)
-
-  def RunWithArgs(self, tableId, columnId):
-    """Retrieves a specific column by its id.
-
-    Args:
-      tableId: Table to which the column belongs.
-      columnId: Name or identifier for the column that is being requested.
-    """
-    client = GetClientFromFlags()
-    global_params = GetGlobalParamsFromFlags()
-    request = messages.FusiontablesColumnGetRequest(
-        tableId=tableId.decode('utf8'),
-        columnId=columnId.decode('utf8'),
-        )
-    result = client.column.Get(
-        request, global_params=global_params)
-    print apitools_base_cli.FormatOutput(result)
-
-
-class ColumnInsert(apitools_base_cli.NewCmd):
-  """Command wrapping column.Insert."""
-
-  usage = """column_insert <tableId>"""
-
-  def __init__(self, name, fv):
-    super(ColumnInsert, self).__init__(name, fv)
-    flags.DEFINE_string(
-        'column',
-        None,
-        u'A Column resource to be passed as the request body.',
-        flag_values=fv)
-
-  def RunWithArgs(self, tableId):
-    """Adds a new column to the table.
-
-    Args:
-      tableId: Table for which a new column is being added.
-
-    Flags:
-      column: A Column resource to be passed as the request body.
-    """
-    client = GetClientFromFlags()
-    global_params = GetGlobalParamsFromFlags()
-    request = messages.FusiontablesColumnInsertRequest(
-        tableId=tableId.decode('utf8'),
-        )
-    if FLAGS['column'].present:
-      request.column = apitools_base.JsonToMessage(messages.Column, FLAGS.column)
-    result = client.column.Insert(
-        request, global_params=global_params)
-    print apitools_base_cli.FormatOutput(result)
-
-
-class ColumnList(apitools_base_cli.NewCmd):
-  """Command wrapping column.List."""
-
-  usage = """column_list <tableId>"""
-
-  def __init__(self, name, fv):
-    super(ColumnList, self).__init__(name, fv)
-    flags.DEFINE_integer(
-        'maxResults',
-        None,
-        u'Maximum number of columns to return. Optional. Default is 5.',
-        flag_values=fv)
-    flags.DEFINE_string(
-        'pageToken',
-        None,
-        u'Continuation token specifying which result page to return. '
-        u'Optional.',
-        flag_values=fv)
-
-  def RunWithArgs(self, tableId):
-    """Retrieves a list of columns.
-
-    Args:
-      tableId: Table whose columns are being listed.
-
-    Flags:
-      maxResults: Maximum number of columns to return. Optional. Default is 5.
-      pageToken: Continuation token specifying which result page to return.
-        Optional.
-    """
-    client = GetClientFromFlags()
-    global_params = GetGlobalParamsFromFlags()
-    request = messages.FusiontablesColumnListRequest(
-        tableId=tableId.decode('utf8'),
-        )
-    if FLAGS['maxResults'].present:
-      request.maxResults = FLAGS.maxResults
-    if FLAGS['pageToken'].present:
-      request.pageToken = FLAGS.pageToken.decode('utf8')
-    result = client.column.List(
-        request, global_params=global_params)
-    print apitools_base_cli.FormatOutput(result)
-
-
-class ColumnPatch(apitools_base_cli.NewCmd):
-  """Command wrapping column.Patch."""
-
-  usage = """column_patch <tableId> <columnId>"""
-
-  def __init__(self, name, fv):
-    super(ColumnPatch, self).__init__(name, fv)
-    flags.DEFINE_string(
-        'column',
-        None,
-        u'A Column resource to be passed as the request body.',
-        flag_values=fv)
-
-  def RunWithArgs(self, tableId, columnId):
-    """Updates the name or type of an existing column. This method supports
-    patch semantics.
-
-    Args:
-      tableId: Table for which the column is being updated.
-      columnId: Name or identifier for the column that is being updated.
-
-    Flags:
-      column: A Column resource to be passed as the request body.
-    """
-    client = GetClientFromFlags()
-    global_params = GetGlobalParamsFromFlags()
-    request = messages.FusiontablesColumnPatchRequest(
-        tableId=tableId.decode('utf8'),
-        columnId=columnId.decode('utf8'),
-        )
-    if FLAGS['column'].present:
-      request.column = apitools_base.JsonToMessage(messages.Column, FLAGS.column)
-    result = client.column.Patch(
-        request, global_params=global_params)
-    print apitools_base_cli.FormatOutput(result)
-
-
-class ColumnUpdate(apitools_base_cli.NewCmd):
-  """Command wrapping column.Update."""
-
-  usage = """column_update <tableId> <columnId>"""
-
-  def __init__(self, name, fv):
-    super(ColumnUpdate, self).__init__(name, fv)
-    flags.DEFINE_string(
-        'column',
-        None,
-        u'A Column resource to be passed as the request body.',
-        flag_values=fv)
-
-  def RunWithArgs(self, tableId, columnId):
-    """Updates the name or type of an existing column.
-
-    Args:
-      tableId: Table for which the column is being updated.
-      columnId: Name or identifier for the column that is being updated.
-
-    Flags:
-      column: A Column resource to be passed as the request body.
-    """
-    client = GetClientFromFlags()
-    global_params = GetGlobalParamsFromFlags()
-    request = messages.FusiontablesColumnUpdateRequest(
-        tableId=tableId.decode('utf8'),
-        columnId=columnId.decode('utf8'),
-        )
-    if FLAGS['column'].present:
-      request.column = apitools_base.JsonToMessage(messages.Column, FLAGS.column)
-    result = client.column.Update(
-        request, global_params=global_params)
-    print apitools_base_cli.FormatOutput(result)
-
-
-class QuerySql(apitools_base_cli.NewCmd):
-  """Command wrapping query.Sql."""
-
-  usage = """query_sql <sql>"""
-
-  def __init__(self, name, fv):
-    super(QuerySql, self).__init__(name, fv)
-    flags.DEFINE_boolean(
-        'hdrs',
-        None,
-        u'Should column names be included (in the first row)?. Default is '
-        u'true.',
-        flag_values=fv)
-    flags.DEFINE_boolean(
-        'typed',
-        None,
-        u'Should typed values be returned in the (JSON) response -- numbers '
-        u'for numeric values and parsed geometries for KML values? Default is'
-        u' true.',
-        flag_values=fv)
-    flags.DEFINE_string(
-        'download_filename',
-        '',
-        'Filename to use for download.',
-        flag_values=fv)
-    flags.DEFINE_boolean(
-        'overwrite',
-        'False',
-        'If True, overwrite the existing file when downloading.',
-        flag_values=fv)
-
-  def RunWithArgs(self, sql):
-    """Executes an SQL SELECT/INSERT/UPDATE/DELETE/SHOW/DESCRIBE/CREATE
-    statement.
-
-    Args:
-      sql: An SQL SELECT/SHOW/DESCRIBE/INSERT/UPDATE/DELETE/CREATE statement.
-
-    Flags:
-      hdrs: Should column names be included (in the first row)?. Default is
-        true.
-      typed: Should typed values be returned in the (JSON) response -- numbers
-        for numeric values and parsed geometries for KML values? Default is
-        true.
-      download_filename: Filename to use for download.
-      overwrite: If True, overwrite the existing file when downloading.
-    """
-    client = GetClientFromFlags()
-    global_params = GetGlobalParamsFromFlags()
-    request = messages.FusiontablesQuerySqlRequest(
-        sql=sql.decode('utf8'),
-        )
-    if FLAGS['hdrs'].present:
-      request.hdrs = FLAGS.hdrs
-    if FLAGS['typed'].present:
-      request.typed = FLAGS.typed
-    download = None
-    if FLAGS.download_filename:
-      download = apitools_base.Download.FromFile(FLAGS.download_filename, overwrite=FLAGS.overwrite,
-          progress_callback=apitools_base.DownloadProgressPrinter,
-          finish_callback=apitools_base.DownloadCompletePrinter)
-    result = client.query.Sql(
-        request, global_params=global_params, download=download)
-    print apitools_base_cli.FormatOutput(result)
-
-
-class QuerySqlGet(apitools_base_cli.NewCmd):
-  """Command wrapping query.SqlGet."""
-
-  usage = """query_sqlGet <sql>"""
-
-  def __init__(self, name, fv):
-    super(QuerySqlGet, self).__init__(name, fv)
-    flags.DEFINE_boolean(
-        'hdrs',
-        None,
-        u'Should column names be included (in the first row)?. Default is '
-        u'true.',
-        flag_values=fv)
-    flags.DEFINE_boolean(
-        'typed',
-        None,
-        u'Should typed values be returned in the (JSON) response -- numbers '
-        u'for numeric values and parsed geometries for KML values? Default is'
-        u' true.',
-        flag_values=fv)
-    flags.DEFINE_string(
-        'download_filename',
-        '',
-        'Filename to use for download.',
-        flag_values=fv)
-    flags.DEFINE_boolean(
-        'overwrite',
-        'False',
-        'If True, overwrite the existing file when downloading.',
-        flag_values=fv)
-
-  def RunWithArgs(self, sql):
-    """Executes an SQL SELECT/SHOW/DESCRIBE statement.
-
-    Args:
-      sql: An SQL SELECT/SHOW/DESCRIBE statement.
-
-    Flags:
-      hdrs: Should column names be included (in the first row)?. Default is
-        true.
-      typed: Should typed values be returned in the (JSON) response -- numbers
-        for numeric values and parsed geometries for KML values? Default is
-        true.
-      download_filename: Filename to use for download.
-      overwrite: If True, overwrite the existing file when downloading.
-    """
-    client = GetClientFromFlags()
-    global_params = GetGlobalParamsFromFlags()
-    request = messages.FusiontablesQuerySqlGetRequest(
-        sql=sql.decode('utf8'),
-        )
-    if FLAGS['hdrs'].present:
-      request.hdrs = FLAGS.hdrs
-    if FLAGS['typed'].present:
-      request.typed = FLAGS.typed
-    download = None
-    if FLAGS.download_filename:
-      download = apitools_base.Download.FromFile(FLAGS.download_filename, overwrite=FLAGS.overwrite,
-          progress_callback=apitools_base.DownloadProgressPrinter,
-          finish_callback=apitools_base.DownloadCompletePrinter)
-    result = client.query.SqlGet(
-        request, global_params=global_params, download=download)
-    print apitools_base_cli.FormatOutput(result)
-
-
-class StyleDelete(apitools_base_cli.NewCmd):
-  """Command wrapping style.Delete."""
-
-  usage = """style_delete <tableId> <styleId>"""
-
-  def __init__(self, name, fv):
-    super(StyleDelete, self).__init__(name, fv)
-
-  def RunWithArgs(self, tableId, styleId):
-    """Deletes a style.
-
-    Args:
-      tableId: Table from which the style is being deleted
-      styleId: Identifier (within a table) for the style being deleted
-    """
-    client = GetClientFromFlags()
-    global_params = GetGlobalParamsFromFlags()
-    request = messages.FusiontablesStyleDeleteRequest(
-        tableId=tableId.decode('utf8'),
-        styleId=styleId,
-        )
-    result = client.style.Delete(
-        request, global_params=global_params)
-    print apitools_base_cli.FormatOutput(result)
-
-
-class StyleGet(apitools_base_cli.NewCmd):
-  """Command wrapping style.Get."""
-
-  usage = """style_get <tableId> <styleId>"""
-
-  def __init__(self, name, fv):
-    super(StyleGet, self).__init__(name, fv)
-
-  def RunWithArgs(self, tableId, styleId):
-    """Gets a specific style.
-
-    Args:
-      tableId: Table to which the requested style belongs
-      styleId: Identifier (integer) for a specific style in a table
-    """
-    client = GetClientFromFlags()
-    global_params = GetGlobalParamsFromFlags()
-    request = messages.FusiontablesStyleGetRequest(
-        tableId=tableId.decode('utf8'),
-        styleId=styleId,
-        )
-    result = client.style.Get(
-        request, global_params=global_params)
-    print apitools_base_cli.FormatOutput(result)
-
-
-class StyleInsert(apitools_base_cli.NewCmd):
-  """Command wrapping style.Insert."""
-
-  usage = """style_insert <tableId>"""
-
-  def __init__(self, name, fv):
-    super(StyleInsert, self).__init__(name, fv)
-    flags.DEFINE_string(
-        'kind',
-        u'fusiontables#styleSetting',
-        u'Type name: an individual style setting. A StyleSetting contains the'
-        u' style defintions for points, lines, and polygons in a table. Since'
-        u' a table can have any one or all of them, a style definition can '
-        u'have point, line and polygon style definitions.',
-        flag_values=fv)
-    flags.DEFINE_string(
-        'markerOptions',
-        None,
-        u'Style definition for points in the table.',
-        flag_values=fv)
-    flags.DEFINE_string(
-        'name',
-        None,
-        u'Optional name for the style setting.',
-        flag_values=fv)
-    flags.DEFINE_string(
-        'polygonOptions',
-        None,
-        u'Style definition for polygons in the table.',
-        flag_values=fv)
-    flags.DEFINE_string(
-        'polylineOptions',
-        None,
-        u'Style definition for lines in the table.',
-        flag_values=fv)
-    flags.DEFINE_integer(
-        'styleId',
-        None,
-        u'Identifier for the style setting (unique only within tables).',
-        flag_values=fv)
-
-  def RunWithArgs(self, tableId):
-    """Adds a new style for the table.
-
-    Args:
-      tableId: Identifier for the table.
-
-    Flags:
-      kind: Type name: an individual style setting. A StyleSetting contains
-        the style defintions for points, lines, and polygons in a table. Since
-        a table can have any one or all of them, a style definition can have
-        point, line and polygon style definitions.
-      markerOptions: Style definition for points in the table.
-      name: Optional name for the style setting.
-      polygonOptions: Style definition for polygons in the table.
-      polylineOptions: Style definition for lines in the table.
-      styleId: Identifier for the style setting (unique only within tables).
-    """
-    client = GetClientFromFlags()
-    global_params = GetGlobalParamsFromFlags()
-    request = messages.StyleSetting(
-        tableId=tableId.decode('utf8'),
-        )
-    if FLAGS['kind'].present:
-      request.kind = FLAGS.kind.decode('utf8')
-    if FLAGS['markerOptions'].present:
-      request.markerOptions = apitools_base.JsonToMessage(messages.PointStyle, FLAGS.markerOptions)
-    if FLAGS['name'].present:
-      request.name = FLAGS.name.decode('utf8')
-    if FLAGS['polygonOptions'].present:
-      request.polygonOptions = apitools_base.JsonToMessage(messages.PolygonStyle, FLAGS.polygonOptions)
-    if FLAGS['polylineOptions'].present:
-      request.polylineOptions = apitools_base.JsonToMessage(messages.LineStyle, FLAGS.polylineOptions)
-    if FLAGS['styleId'].present:
-      request.styleId = FLAGS.styleId
-    result = client.style.Insert(
-        request, global_params=global_params)
-    print apitools_base_cli.FormatOutput(result)
-
-
-class StyleList(apitools_base_cli.NewCmd):
-  """Command wrapping style.List."""
-
-  usage = """style_list <tableId>"""
-
-  def __init__(self, name, fv):
-    super(StyleList, self).__init__(name, fv)
-    flags.DEFINE_integer(
-        'maxResults',
-        None,
-        u'Maximum number of styles to return. Optional. Default is 5.',
-        flag_values=fv)
-    flags.DEFINE_string(
-        'pageToken',
-        None,
-        u'Continuation token specifying which result page to return. '
-        u'Optional.',
-        flag_values=fv)
-
-  def RunWithArgs(self, tableId):
-    """Retrieves a list of styles.
-
-    Args:
-      tableId: Table whose styles are being listed
-
-    Flags:
-      maxResults: Maximum number of styles to return. Optional. Default is 5.
-      pageToken: Continuation token specifying which result page to return.
-        Optional.
-    """
-    client = GetClientFromFlags()
-    global_params = GetGlobalParamsFromFlags()
-    request = messages.FusiontablesStyleListRequest(
-        tableId=tableId.decode('utf8'),
-        )
-    if FLAGS['maxResults'].present:
-      request.maxResults = FLAGS.maxResults
-    if FLAGS['pageToken'].present:
-      request.pageToken = FLAGS.pageToken.decode('utf8')
-    result = client.style.List(
-        request, global_params=global_params)
-    print apitools_base_cli.FormatOutput(result)
-
-
-class StylePatch(apitools_base_cli.NewCmd):
-  """Command wrapping style.Patch."""
-
-  usage = """style_patch <tableId> <styleId>"""
-
-  def __init__(self, name, fv):
-    super(StylePatch, self).__init__(name, fv)
-    flags.DEFINE_string(
-        'kind',
-        u'fusiontables#styleSetting',
-        u'Type name: an individual style setting. A StyleSetting contains the'
-        u' style defintions for points, lines, and polygons in a table. Since'
-        u' a table can have any one or all of them, a style definition can '
-        u'have point, line and polygon style definitions.',
-        flag_values=fv)
-    flags.DEFINE_string(
-        'markerOptions',
-        None,
-        u'Style definition for points in the table.',
-        flag_values=fv)
-    flags.DEFINE_string(
-        'name',
-        None,
-        u'Optional name for the style setting.',
-        flag_values=fv)
-    flags.DEFINE_string(
-        'polygonOptions',
-        None,
-        u'Style definition for polygons in the table.',
-        flag_values=fv)
-    flags.DEFINE_string(
-        'polylineOptions',
-        None,
-        u'Style definition for lines in the table.',
-        flag_values=fv)
-
-  def RunWithArgs(self, tableId, styleId):
-    """Updates an existing style. This method supports patch semantics.
-
-    Args:
-      tableId: Identifier for the table.
-      styleId: Identifier for the style setting (unique only within tables).
-
-    Flags:
-      kind: Type name: an individual style setting. A StyleSetting contains
-        the style defintions for points, lines, and polygons in a table. Since
-        a table can have any one or all of them, a style definition can have
-        point, line and polygon style definitions.
-      markerOptions: Style definition for points in the table.
-      name: Optional name for the style setting.
-      polygonOptions: Style definition for polygons in the table.
-      polylineOptions: Style definition for lines in the table.
-    """
-    client = GetClientFromFlags()
-    global_params = GetGlobalParamsFromFlags()
-    request = messages.StyleSetting(
-        tableId=tableId.decode('utf8'),
-        styleId=styleId,
-        )
-    if FLAGS['kind'].present:
-      request.kind = FLAGS.kind.decode('utf8')
-    if FLAGS['markerOptions'].present:
-      request.markerOptions = apitools_base.JsonToMessage(messages.PointStyle, FLAGS.markerOptions)
-    if FLAGS['name'].present:
-      request.name = FLAGS.name.decode('utf8')
-    if FLAGS['polygonOptions'].present:
-      request.polygonOptions = apitools_base.JsonToMessage(messages.PolygonStyle, FLAGS.polygonOptions)
-    if FLAGS['polylineOptions'].present:
-      request.polylineOptions = apitools_base.JsonToMessage(messages.LineStyle, FLAGS.polylineOptions)
-    result = client.style.Patch(
-        request, global_params=global_params)
-    print apitools_base_cli.FormatOutput(result)
-
-
-class StyleUpdate(apitools_base_cli.NewCmd):
-  """Command wrapping style.Update."""
-
-  usage = """style_update <tableId> <styleId>"""
-
-  def __init__(self, name, fv):
-    super(StyleUpdate, self).__init__(name, fv)
-    flags.DEFINE_string(
-        'kind',
-        u'fusiontables#styleSetting',
-        u'Type name: an individual style setting. A StyleSetting contains the'
-        u' style defintions for points, lines, and polygons in a table. Since'
-        u' a table can have any one or all of them, a style definition can '
-        u'have point, line and polygon style definitions.',
-        flag_values=fv)
-    flags.DEFINE_string(
-        'markerOptions',
-        None,
-        u'Style definition for points in the table.',
-        flag_values=fv)
-    flags.DEFINE_string(
-        'name',
-        None,
-        u'Optional name for the style setting.',
-        flag_values=fv)
-    flags.DEFINE_string(
-        'polygonOptions',
-        None,
-        u'Style definition for polygons in the table.',
-        flag_values=fv)
-    flags.DEFINE_string(
-        'polylineOptions',
-        None,
-        u'Style definition for lines in the table.',
-        flag_values=fv)
-
-  def RunWithArgs(self, tableId, styleId):
-    """Updates an existing style.
-
-    Args:
-      tableId: Identifier for the table.
-      styleId: Identifier for the style setting (unique only within tables).
-
-    Flags:
-      kind: Type name: an individual style setting. A StyleSetting contains
-        the style defintions for points, lines, and polygons in a table. Since
-        a table can have any one or all of them, a style definition can have
-        point, line and polygon style definitions.
-      markerOptions: Style definition for points in the table.
-      name: Optional name for the style setting.
-      polygonOptions: Style definition for polygons in the table.
-      polylineOptions: Style definition for lines in the table.
-    """
-    client = GetClientFromFlags()
-    global_params = GetGlobalParamsFromFlags()
-    request = messages.StyleSetting(
-        tableId=tableId.decode('utf8'),
-        styleId=styleId,
-        )
-    if FLAGS['kind'].present:
-      request.kind = FLAGS.kind.decode('utf8')
-    if FLAGS['markerOptions'].present:
-      request.markerOptions = apitools_base.JsonToMessage(messages.PointStyle, FLAGS.markerOptions)
-    if FLAGS['name'].present:
-      request.name = FLAGS.name.decode('utf8')
-    if FLAGS['polygonOptions'].present:
-      request.polygonOptions = apitools_base.JsonToMessage(messages.PolygonStyle, FLAGS.polygonOptions)
-    if FLAGS['polylineOptions'].present:
-      request.polylineOptions = apitools_base.JsonToMessage(messages.LineStyle, FLAGS.polylineOptions)
-    result = client.style.Update(
-        request, global_params=global_params)
-    print apitools_base_cli.FormatOutput(result)
-
-
-class TableCopy(apitools_base_cli.NewCmd):
-  """Command wrapping table.Copy."""
-
-  usage = """table_copy <tableId>"""
-
-  def __init__(self, name, fv):
-    super(TableCopy, self).__init__(name, fv)
-    flags.DEFINE_boolean(
-        'copyPresentation',
-        None,
-        u'Whether to also copy tabs, styles, and templates. Default is false.',
-        flag_values=fv)
-
-  def RunWithArgs(self, tableId):
-    """Copies a table.
-
-    Args:
-      tableId: ID of the table that is being copied.
-
-    Flags:
-      copyPresentation: Whether to also copy tabs, styles, and templates.
-        Default is false.
-    """
-    client = GetClientFromFlags()
-    global_params = GetGlobalParamsFromFlags()
-    request = messages.FusiontablesTableCopyRequest(
-        tableId=tableId.decode('utf8'),
-        )
-    if FLAGS['copyPresentation'].present:
-      request.copyPresentation = FLAGS.copyPresentation
-    result = client.table.Copy(
-        request, global_params=global_params)
-    print apitools_base_cli.FormatOutput(result)
-
-
-class TableDelete(apitools_base_cli.NewCmd):
-  """Command wrapping table.Delete."""
-
-  usage = """table_delete <tableId>"""
-
-  def __init__(self, name, fv):
-    super(TableDelete, self).__init__(name, fv)
-
-  def RunWithArgs(self, tableId):
-    """Deletes a table.
-
-    Args:
-      tableId: ID of the table that is being deleted.
-    """
-    client = GetClientFromFlags()
-    global_params = GetGlobalParamsFromFlags()
-    request = messages.FusiontablesTableDeleteRequest(
-        tableId=tableId.decode('utf8'),
-        )
-    result = client.table.Delete(
-        request, global_params=global_params)
-    print apitools_base_cli.FormatOutput(result)
-
-
-class TableGet(apitools_base_cli.NewCmd):
-  """Command wrapping table.Get."""
-
-  usage = """table_get <tableId>"""
-
-  def __init__(self, name, fv):
-    super(TableGet, self).__init__(name, fv)
-
-  def RunWithArgs(self, tableId):
-    """Retrieves a specific table by its id.
-
-    Args:
-      tableId: Identifier(ID) for the table being requested.
-    """
-    client = GetClientFromFlags()
-    global_params = GetGlobalParamsFromFlags()
-    request = messages.FusiontablesTableGetRequest(
-        tableId=tableId.decode('utf8'),
-        )
-    result = client.table.Get(
-        request, global_params=global_params)
-    print apitools_base_cli.FormatOutput(result)
-
-
-class TableImportRows(apitools_base_cli.NewCmd):
-  """Command wrapping table.ImportRows."""
-
-  usage = """table_importRows <tableId>"""
-
-  def __init__(self, name, fv):
-    super(TableImportRows, self).__init__(name, fv)
-    flags.DEFINE_string(
-        'delimiter',
-        None,
-        u'The delimiter used to separate cell values. This can only consist '
-        u"of a single character. Default is ','.",
-        flag_values=fv)
-    flags.DEFINE_string(
-        'encoding',
-        None,
-        u"The encoding of the content. Default is UTF-8. Use 'auto-detect' if"
-        u' you are unsure of the encoding.',
-        flag_values=fv)
-    flags.DEFINE_integer(
-        'endLine',
-        None,
-        u'The index of the last line from which to start importing, '
-        u'exclusive. Thus, the number of imported lines is endLine - '
-        u'startLine. If this parameter is not provided, the file will be '
-        u'imported until the last line of the file. If endLine is negative, '
-        u'then the imported content will exclude the last endLine lines. That'
-        u' is, if endline is negative, no line will be imported whose index '
-        u'is greater than N + endLine where N is the number of lines in the '
-        u'file, and the number of imported lines will be N + endLine - '
-        u'startLine.',
-        flag_values=fv)
-    flags.DEFINE_boolean(
-        'isStrict',
-        None,
-        u'Whether the CSV must have the same number of values for each row. '
-        u'If false, rows with fewer values will be padded with empty values. '
-        u'Default is true.',
-        flag_values=fv)
-    flags.DEFINE_integer(
-        'startLine',
-        None,
-        u'The index of the first line from which to start importing, '
-        u'inclusive. Default is 0.',
-        flag_values=fv)
-    flags.DEFINE_string(
-        'upload_filename',
-        '',
-        'Filename to use for upload.',
-        flag_values=fv)
-    flags.DEFINE_string(
-        'upload_mime_type',
-        '',
-        'MIME type to use for the upload. Only needed if the extension on '
-        '--upload_filename does not determine the correct (or any) MIME '
-        'type.',
-        flag_values=fv)
-
-  def RunWithArgs(self, tableId):
-    """Import more rows into a table.
-
-    Args:
-      tableId: The table into which new rows are being imported.
-
-    Flags:
-      delimiter: The delimiter used to separate cell values. This can only
-        consist of a single character. Default is ','.
-      encoding: The encoding of the content. Default is UTF-8. Use 'auto-
-        detect' if you are unsure of the encoding.
-      endLine: The index of the last line from which to start importing,
-        exclusive. Thus, the number of imported lines is endLine - startLine.
-        If this parameter is not provided, the file will be imported until the
-        last line of the file. If endLine is negative, then the imported
-        content will exclude the last endLine lines. That is, if endline is
-        negative, no line will be imported whose index is greater than N +
-        endLine where N is the number of lines in the file, and the number of
-        imported lines will be N + endLine - startLine.
-      isStrict: Whether the CSV must have the same number of values for each
-        row. If false, rows with fewer values will be padded with empty
-        values. Default is true.
-      startLine: The index of the first line from which to start importing,
-        inclusive. Default is 0.
-      upload_filename: Filename to use for upload.
-      upload_mime_type: MIME type to use for the upload. Only needed if the
-        extension on --upload_filename does not determine the correct (or any)
-        MIME type.
-    """
-    client = GetClientFromFlags()
-    global_params = GetGlobalParamsFromFlags()
-    request = messages.FusiontablesTableImportRowsRequest(
-        tableId=tableId.decode('utf8'),
-        )
-    if FLAGS['delimiter'].present:
-      request.delimiter = FLAGS.delimiter.decode('utf8')
-    if FLAGS['encoding'].present:
-      request.encoding = FLAGS.encoding.decode('utf8')
-    if FLAGS['endLine'].present:
-      request.endLine = FLAGS.endLine
-    if FLAGS['isStrict'].present:
-      request.isStrict = FLAGS.isStrict
-    if FLAGS['startLine'].present:
-      request.startLine = FLAGS.startLine
-    upload = None
-    if FLAGS.upload_filename:
-      upload = apitools_base.Upload.FromFile(
-          FLAGS.upload_filename, FLAGS.upload_mime_type,
-          progress_callback=apitools_base.UploadProgressPrinter,
-          finish_callback=apitools_base.UploadCompletePrinter)
-    result = client.table.ImportRows(
-        request, global_params=global_params, upload=upload)
-    print apitools_base_cli.FormatOutput(result)
-
-
-class TableImportTable(apitools_base_cli.NewCmd):
-  """Command wrapping table.ImportTable."""
-
-  usage = """table_importTable <name>"""
-
-  def __init__(self, name, fv):
-    super(TableImportTable, self).__init__(name, fv)
-    flags.DEFINE_string(
-        'delimiter',
-        None,
-        u'The delimiter used to separate cell values. This can only consist '
-        u"of a single character. Default is ','.",
-        flag_values=fv)
-    flags.DEFINE_string(
-        'encoding',
-        None,
-        u"The encoding of the content. Default is UTF-8. Use 'auto-detect' if"
-        u' you are unsure of the encoding.',
-        flag_values=fv)
-    flags.DEFINE_string(
-        'upload_filename',
-        '',
-        'Filename to use for upload.',
-        flag_values=fv)
-    flags.DEFINE_string(
-        'upload_mime_type',
-        '',
-        'MIME type to use for the upload. Only needed if the extension on '
-        '--upload_filename does not determine the correct (or any) MIME '
-        'type.',
-        flag_values=fv)
-
-  def RunWithArgs(self, name):
-    """Import a new table.
-
-    Args:
-      name: The name to be assigned to the new table.
-
-    Flags:
-      delimiter: The delimiter used to separate cell values. This can only
-        consist of a single character. Default is ','.
-      encoding: The encoding of the content. Default is UTF-8. Use 'auto-
-        detect' if you are unsure of the encoding.
-      upload_filename: Filename to use for upload.
-      upload_mime_type: MIME type to use for the upload. Only needed if the
-        extension on --upload_filename does not determine the correct (or any)
-        MIME type.
-    """
-    client = GetClientFromFlags()
-    global_params = GetGlobalParamsFromFlags()
-    request = messages.FusiontablesTableImportTableRequest(
-        name=name.decode('utf8'),
-        )
-    if FLAGS['delimiter'].present:
-      request.delimiter = FLAGS.delimiter.decode('utf8')
-    if FLAGS['encoding'].present:
-      request.encoding = FLAGS.encoding.decode('utf8')
-    upload = None
-    if FLAGS.upload_filename:
-      upload = apitools_base.Upload.FromFile(
-          FLAGS.upload_filename, FLAGS.upload_mime_type,
-          progress_callback=apitools_base.UploadProgressPrinter,
-          finish_callback=apitools_base.UploadCompletePrinter)
-    result = client.table.ImportTable(
-        request, global_params=global_params, upload=upload)
-    print apitools_base_cli.FormatOutput(result)
-
-
-class TableInsert(apitools_base_cli.NewCmd):
-  """Command wrapping table.Insert."""
-
-  usage = """table_insert"""
-
-  def __init__(self, name, fv):
-    super(TableInsert, self).__init__(name, fv)
-    flags.DEFINE_string(
-        'attribution',
-        None,
-        u'Optional attribution assigned to the table.',
-        flag_values=fv)
-    flags.DEFINE_string(
-        'attributionLink',
-        None,
-        u'Optional link for attribution.',
-        flag_values=fv)
-    flags.DEFINE_string(
-        'baseTableIds',
-        None,
-        u'Optional base table identifier if this table is a view or merged '
-        u'table.',
-        flag_values=fv)
-    flags.DEFINE_string(
-        'columns',
-        None,
-        u'Columns in the table.',
-        flag_values=fv)
-    flags.DEFINE_string(
-        'description',
-        None,
-        u'Optional description assigned to the table.',
-        flag_values=fv)
-    flags.DEFINE_boolean(
-        'isExportable',
-        None,
-        u'Variable for whether table is exportable.',
-        flag_values=fv)
-    flags.DEFINE_string(
-        'kind',
-        u'fusiontables#table',
-        u'Type name: a template for an individual table.',
-        flag_values=fv)
-    flags.DEFINE_string(
-        'name',
-        None,
-        u'Name assigned to a table.',
-        flag_values=fv)
-    flags.DEFINE_string(
-        'sql',
-        None,
-        u'Optional sql that encodes the table definition for derived tables.',
-        flag_values=fv)
-    flags.DEFINE_string(
-        'tableId',
-        None,
-        u'Encrypted unique alphanumeric identifier for the table.',
-        flag_values=fv)
-
-  def RunWithArgs(self):
-    """Creates a new table.
-
-    Flags:
-      attribution: Optional attribution assigned to the table.
-      attributionLink: Optional link for attribution.
-      baseTableIds: Optional base table identifier if this table is a view or
-        merged table.
-      columns: Columns in the table.
-      description: Optional description assigned to the table.
-      isExportable: Variable for whether table is exportable.
-      kind: Type name: a template for an individual table.
-      name: Name assigned to a table.
-      sql: Optional sql that encodes the table definition for derived tables.
-      tableId: Encrypted unique alphanumeric identifier for the table.
-    """
-    client = GetClientFromFlags()
-    global_params = GetGlobalParamsFromFlags()
-    request = messages.Table(
-        )
-    if FLAGS['attribution'].present:
-      request.attribution = FLAGS.attribution.decode('utf8')
-    if FLAGS['attributionLink'].present:
-      request.attributionLink = FLAGS.attributionLink.decode('utf8')
-    if FLAGS['baseTableIds'].present:
-      request.baseTableIds = [x.decode('utf8') for x in FLAGS.baseTableIds]
-    if FLAGS['columns'].present:
-      request.columns = [apitools_base.JsonToMessage(messages.Column, x) for x in FLAGS.columns]
-    if FLAGS['description'].present:
-      request.description = FLAGS.description.decode('utf8')
-    if FLAGS['isExportable'].present:
-      request.isExportable = FLAGS.isExportable
-    if FLAGS['kind'].present:
-      request.kind = FLAGS.kind.decode('utf8')
-    if FLAGS['name'].present:
-      request.name = FLAGS.name.decode('utf8')
-    if FLAGS['sql'].present:
-      request.sql = FLAGS.sql.decode('utf8')
-    if FLAGS['tableId'].present:
-      request.tableId = FLAGS.tableId.decode('utf8')
-    result = client.table.Insert(
-        request, global_params=global_params)
-    print apitools_base_cli.FormatOutput(result)
-
-
-class TableList(apitools_base_cli.NewCmd):
-  """Command wrapping table.List."""
-
-  usage = """table_list"""
-
-  def __init__(self, name, fv):
-    super(TableList, self).__init__(name, fv)
-    flags.DEFINE_integer(
-        'maxResults',
-        None,
-        u'Maximum number of styles to return. Optional. Default is 5.',
-        flag_values=fv)
-    flags.DEFINE_string(
-        'pageToken',
-        None,
-        u'Continuation token specifying which result page to return. '
-        u'Optional.',
-        flag_values=fv)
-
-  def RunWithArgs(self):
-    """Retrieves a list of tables a user owns.
-
-    Flags:
-      maxResults: Maximum number of styles to return. Optional. Default is 5.
-      pageToken: Continuation token specifying which result page to return.
-        Optional.
-    """
-    client = GetClientFromFlags()
-    global_params = GetGlobalParamsFromFlags()
-    request = messages.FusiontablesTableListRequest(
-        )
-    if FLAGS['maxResults'].present:
-      request.maxResults = FLAGS.maxResults
-    if FLAGS['pageToken'].present:
-      request.pageToken = FLAGS.pageToken.decode('utf8')
-    result = client.table.List(
-        request, global_params=global_params)
-    print apitools_base_cli.FormatOutput(result)
-
-
-class TablePatch(apitools_base_cli.NewCmd):
-  """Command wrapping table.Patch."""
-
-  usage = """table_patch <tableId>"""
-
-  def __init__(self, name, fv):
-    super(TablePatch, self).__init__(name, fv)
-    flags.DEFINE_boolean(
-        'replaceViewDefinition',
-        None,
-        u'Should the view definition also be updated? The specified view '
-        u'definition replaces the existing one. Only a view can be updated '
-        u'with a new definition.',
-        flag_values=fv)
-    flags.DEFINE_string(
-        'table',
-        None,
-        u'A Table resource to be passed as the request body.',
-        flag_values=fv)
-
-  def RunWithArgs(self, tableId):
-    """Updates an existing table. Unless explicitly requested, only the name,
-    description, and attribution will be updated. This method supports patch
-    semantics.
-
-    Args:
-      tableId: ID of the table that is being updated.
-
-    Flags:
-      replaceViewDefinition: Should the view definition also be updated? The
-        specified view definition replaces the existing one. Only a view can
-        be updated with a new definition.
-      table: A Table resource to be passed as the request body.
-    """
-    client = GetClientFromFlags()
-    global_params = GetGlobalParamsFromFlags()
-    request = messages.FusiontablesTablePatchRequest(
-        tableId=tableId.decode('utf8'),
-        )
-    if FLAGS['replaceViewDefinition'].present:
-      request.replaceViewDefinition = FLAGS.replaceViewDefinition
-    if FLAGS['table'].present:
-      request.table = apitools_base.JsonToMessage(messages.Table, FLAGS.table)
-    result = client.table.Patch(
-        request, global_params=global_params)
-    print apitools_base_cli.FormatOutput(result)
-
-
-class TableUpdate(apitools_base_cli.NewCmd):
-  """Command wrapping table.Update."""
-
-  usage = """table_update <tableId>"""
-
-  def __init__(self, name, fv):
-    super(TableUpdate, self).__init__(name, fv)
-    flags.DEFINE_boolean(
-        'replaceViewDefinition',
-        None,
-        u'Should the view definition also be updated? The specified view '
-        u'definition replaces the existing one. Only a view can be updated '
-        u'with a new definition.',
-        flag_values=fv)
-    flags.DEFINE_string(
-        'table',
-        None,
-        u'A Table resource to be passed as the request body.',
-        flag_values=fv)
-
-  def RunWithArgs(self, tableId):
-    """Updates an existing table. Unless explicitly requested, only the name,
-    description, and attribution will be updated.
-
-    Args:
-      tableId: ID of the table that is being updated.
-
-    Flags:
-      replaceViewDefinition: Should the view definition also be updated? The
-        specified view definition replaces the existing one. Only a view can
-        be updated with a new definition.
-      table: A Table resource to be passed as the request body.
-    """
-    client = GetClientFromFlags()
-    global_params = GetGlobalParamsFromFlags()
-    request = messages.FusiontablesTableUpdateRequest(
-        tableId=tableId.decode('utf8'),
-        )
-    if FLAGS['replaceViewDefinition'].present:
-      request.replaceViewDefinition = FLAGS.replaceViewDefinition
-    if FLAGS['table'].present:
-      request.table = apitools_base.JsonToMessage(messages.Table, FLAGS.table)
-    result = client.table.Update(
-        request, global_params=global_params)
-    print apitools_base_cli.FormatOutput(result)
-
-
-class TaskDelete(apitools_base_cli.NewCmd):
-  """Command wrapping task.Delete."""
-
-  usage = """task_delete <tableId> <taskId>"""
-
-  def __init__(self, name, fv):
-    super(TaskDelete, self).__init__(name, fv)
-
-  def RunWithArgs(self, tableId, taskId):
-    """Deletes the task, unless already started.
-
-    Args:
-      tableId: Table from which the task is being deleted.
-      taskId: A string attribute.
-    """
-    client = GetClientFromFlags()
-    global_params = GetGlobalParamsFromFlags()
-    request = messages.FusiontablesTaskDeleteRequest(
-        tableId=tableId.decode('utf8'),
-        taskId=taskId.decode('utf8'),
-        )
-    result = client.task.Delete(
-        request, global_params=global_params)
-    print apitools_base_cli.FormatOutput(result)
-
-
-class TaskGet(apitools_base_cli.NewCmd):
-  """Command wrapping task.Get."""
-
-  usage = """task_get <tableId> <taskId>"""
-
-  def __init__(self, name, fv):
-    super(TaskGet, self).__init__(name, fv)
-
-  def RunWithArgs(self, tableId, taskId):
-    """Retrieves a specific task by its id.
-
-    Args:
-      tableId: Table to which the task belongs.
-      taskId: A string attribute.
-    """
-    client = GetClientFromFlags()
-    global_params = GetGlobalParamsFromFlags()
-    request = messages.FusiontablesTaskGetRequest(
-        tableId=tableId.decode('utf8'),
-        taskId=taskId.decode('utf8'),
-        )
-    result = client.task.Get(
-        request, global_params=global_params)
-    print apitools_base_cli.FormatOutput(result)
-
-
-class TaskList(apitools_base_cli.NewCmd):
-  """Command wrapping task.List."""
-
-  usage = """task_list <tableId>"""
-
-  def __init__(self, name, fv):
-    super(TaskList, self).__init__(name, fv)
-    flags.DEFINE_integer(
-        'maxResults',
-        None,
-        u'Maximum number of columns to return. Optional. Default is 5.',
-        flag_values=fv)
-    flags.DEFINE_string(
-        'pageToken',
-        None,
-        'A string attribute.',
-        flag_values=fv)
-    flags.DEFINE_integer(
-        'startIndex',
-        None,
-        'A integer attribute.',
-        flag_values=fv)
-
-  def RunWithArgs(self, tableId):
-    """Retrieves a list of tasks.
-
-    Args:
-      tableId: Table whose tasks are being listed.
-
-    Flags:
-      maxResults: Maximum number of columns to return. Optional. Default is 5.
-      pageToken: A string attribute.
-      startIndex: A integer attribute.
-    """
-    client = GetClientFromFlags()
-    global_params = GetGlobalParamsFromFlags()
-    request = messages.FusiontablesTaskListRequest(
-        tableId=tableId.decode('utf8'),
-        )
-    if FLAGS['maxResults'].present:
-      request.maxResults = FLAGS.maxResults
-    if FLAGS['pageToken'].present:
-      request.pageToken = FLAGS.pageToken.decode('utf8')
-    if FLAGS['startIndex'].present:
-      request.startIndex = FLAGS.startIndex
-    result = client.task.List(
-        request, global_params=global_params)
-    print apitools_base_cli.FormatOutput(result)
-
-
-class TemplateDelete(apitools_base_cli.NewCmd):
-  """Command wrapping template.Delete."""
-
-  usage = """template_delete <tableId> <templateId>"""
-
-  def __init__(self, name, fv):
-    super(TemplateDelete, self).__init__(name, fv)
-
-  def RunWithArgs(self, tableId, templateId):
-    """Deletes a template
-
-    Args:
-      tableId: Table from which the template is being deleted
-      templateId: Identifier for the template which is being deleted
-    """
-    client = GetClientFromFlags()
-    global_params = GetGlobalParamsFromFlags()
-    request = messages.FusiontablesTemplateDeleteRequest(
-        tableId=tableId.decode('utf8'),
-        templateId=templateId,
-        )
-    result = client.template.Delete(
-        request, global_params=global_params)
-    print apitools_base_cli.FormatOutput(result)
-
-
-class TemplateGet(apitools_base_cli.NewCmd):
-  """Command wrapping template.Get."""
-
-  usage = """template_get <tableId> <templateId>"""
-
-  def __init__(self, name, fv):
-    super(TemplateGet, self).__init__(name, fv)
-
-  def RunWithArgs(self, tableId, templateId):
-    """Retrieves a specific template by its id
-
-    Args:
-      tableId: Table to which the template belongs
-      templateId: Identifier for the template that is being requested
-    """
-    client = GetClientFromFlags()
-    global_params = GetGlobalParamsFromFlags()
-    request = messages.FusiontablesTemplateGetRequest(
-        tableId=tableId.decode('utf8'),
-        templateId=templateId,
-        )
-    result = client.template.Get(
-        request, global_params=global_params)
-    print apitools_base_cli.FormatOutput(result)
-
-
-class TemplateInsert(apitools_base_cli.NewCmd):
-  """Command wrapping template.Insert."""
-
-  usage = """template_insert <tableId>"""
-
-  def __init__(self, name, fv):
-    super(TemplateInsert, self).__init__(name, fv)
-    flags.DEFINE_string(
-        'automaticColumnNames',
-        None,
-        u'List of columns from which the template is to be automatically '
-        u'constructed. Only one of body or automaticColumns can be specified.',
-        flag_values=fv)
-    flags.DEFINE_string(
-        'body',
-        None,
-        u'Body of the template. It contains HTML with {column_name} to insert'
-        u' values from a particular column. The body is sanitized to remove '
-        u'certain tags, e.g., script. Only one of body or automaticColumns '
-        u'can be specified.',
-        flag_values=fv)
-    flags.DEFINE_string(
-        'kind',
-        u'fusiontables#template',
-        u'Type name: a template for the info window contents. The template '
-        u'can either include an HTML body or a list of columns from which the'
-        u' template is computed automatically.',
-        flag_values=fv)
-    flags.DEFINE_string(
-        'name',
-        None,
-        u'Optional name assigned to a template.',
-        flag_values=fv)
-    flags.DEFINE_integer(
-        'templateId',
-        None,
-        u'Identifier for the template, unique within the context of a '
-        u'particular table.',
-        flag_values=fv)
-
-  def RunWithArgs(self, tableId):
-    """Creates a new template for the table.
-
-    Args:
-      tableId: Identifier for the table for which the template is defined.
-
-    Flags:
-      automaticColumnNames: List of columns from which the template is to be
-        automatically constructed. Only one of body or automaticColumns can be
-        specified.
-      body: Body of the template. It contains HTML with {column_name} to
-        insert values from a particular column. The body is sanitized to
-        remove certain tags, e.g., script. Only one of body or
-        automaticColumns can be specified.
-      kind: Type name: a template for the info window contents. The template
-        can either include an HTML body or a list of columns from which the
-        template is computed automatically.
-      name: Optional name assigned to a template.
-      templateId: Identifier for the template, unique within the context of a
-        particular table.
-    """
-    client = GetClientFromFlags()
-    global_params = GetGlobalParamsFromFlags()
-    request = messages.Template(
-        tableId=tableId.decode('utf8'),
-        )
-    if FLAGS['automaticColumnNames'].present:
-      request.automaticColumnNames = [x.decode('utf8') for x in FLAGS.automaticColumnNames]
-    if FLAGS['body'].present:
-      request.body = FLAGS.body.decode('utf8')
-    if FLAGS['kind'].present:
-      request.kind = FLAGS.kind.decode('utf8')
-    if FLAGS['name'].present:
-      request.name = FLAGS.name.decode('utf8')
-    if FLAGS['templateId'].present:
-      request.templateId = FLAGS.templateId
-    result = client.template.Insert(
-        request, global_params=global_params)
-    print apitools_base_cli.FormatOutput(result)
-
-
-class TemplateList(apitools_base_cli.NewCmd):
-  """Command wrapping template.List."""
-
-  usage = """template_list <tableId>"""
-
-  def __init__(self, name, fv):
-    super(TemplateList, self).__init__(name, fv)
-    flags.DEFINE_integer(
-        'maxResults',
-        None,
-        u'Maximum number of templates to return. Optional. Default is 5.',
-        flag_values=fv)
-    flags.DEFINE_string(
-        'pageToken',
-        None,
-        u'Continuation token specifying which results page to return. '
-        u'Optional.',
-        flag_values=fv)
-
-  def RunWithArgs(self, tableId):
-    """Retrieves a list of templates.
-
-    Args:
-      tableId: Identifier for the table whose templates are being requested
-
-    Flags:
-      maxResults: Maximum number of templates to return. Optional. Default is
-        5.
-      pageToken: Continuation token specifying which results page to return.
-        Optional.
-    """
-    client = GetClientFromFlags()
-    global_params = GetGlobalParamsFromFlags()
-    request = messages.FusiontablesTemplateListRequest(
-        tableId=tableId.decode('utf8'),
-        )
-    if FLAGS['maxResults'].present:
-      request.maxResults = FLAGS.maxResults
-    if FLAGS['pageToken'].present:
-      request.pageToken = FLAGS.pageToken.decode('utf8')
-    result = client.template.List(
-        request, global_params=global_params)
-    print apitools_base_cli.FormatOutput(result)
-
-
-class TemplatePatch(apitools_base_cli.NewCmd):
-  """Command wrapping template.Patch."""
-
-  usage = """template_patch <tableId> <templateId>"""
-
-  def __init__(self, name, fv):
-    super(TemplatePatch, self).__init__(name, fv)
-    flags.DEFINE_string(
-        'automaticColumnNames',
-        None,
-        u'List of columns from which the template is to be automatically '
-        u'constructed. Only one of body or automaticColumns can be specified.',
-        flag_values=fv)
-    flags.DEFINE_string(
-        'body',
-        None,
-        u'Body of the template. It contains HTML with {column_name} to insert'
-        u' values from a particular column. The body is sanitized to remove '
-        u'certain tags, e.g., script. Only one of body or automaticColumns '
-        u'can be specified.',
-        flag_values=fv)
-    flags.DEFINE_string(
-        'kind',
-        u'fusiontables#template',
-        u'Type name: a template for the info window contents. The template '
-        u'can either include an HTML body or a list of columns from which the'
-        u' template is computed automatically.',
-        flag_values=fv)
-    flags.DEFINE_string(
-        'name',
-        None,
-        u'Optional name assigned to a template.',
-        flag_values=fv)
-
-  def RunWithArgs(self, tableId, templateId):
-    """Updates an existing template. This method supports patch semantics.
-
-    Args:
-      tableId: Identifier for the table for which the template is defined.
-      templateId: Identifier for the template, unique within the context of a
-        particular table.
-
-    Flags:
-      automaticColumnNames: List of columns from which the template is to be
-        automatically constructed. Only one of body or automaticColumns can be
-        specified.
-      body: Body of the template. It contains HTML with {column_name} to
-        insert values from a particular column. The body is sanitized to
-        remove certain tags, e.g., script. Only one of body or
-        automaticColumns can be specified.
-      kind: Type name: a template for the info window contents. The template
-        can either include an HTML body or a list of columns from which the
-        template is computed automatically.
-      name: Optional name assigned to a template.
-    """
-    client = GetClientFromFlags()
-    global_params = GetGlobalParamsFromFlags()
-    request = messages.Template(
-        tableId=tableId.decode('utf8'),
-        templateId=templateId,
-        )
-    if FLAGS['automaticColumnNames'].present:
-      request.automaticColumnNames = [x.decode('utf8') for x in FLAGS.automaticColumnNames]
-    if FLAGS['body'].present:
-      request.body = FLAGS.body.decode('utf8')
-    if FLAGS['kind'].present:
-      request.kind = FLAGS.kind.decode('utf8')
-    if FLAGS['name'].present:
-      request.name = FLAGS.name.decode('utf8')
-    result = client.template.Patch(
-        request, global_params=global_params)
-    print apitools_base_cli.FormatOutput(result)
-
-
-class TemplateUpdate(apitools_base_cli.NewCmd):
-  """Command wrapping template.Update."""
-
-  usage = """template_update <tableId> <templateId>"""
-
-  def __init__(self, name, fv):
-    super(TemplateUpdate, self).__init__(name, fv)
-    flags.DEFINE_string(
-        'automaticColumnNames',
-        None,
-        u'List of columns from which the template is to be automatically '
-        u'constructed. Only one of body or automaticColumns can be specified.',
-        flag_values=fv)
-    flags.DEFINE_string(
-        'body',
-        None,
-        u'Body of the template. It contains HTML with {column_name} to insert'
-        u' values from a particular column. The body is sanitized to remove '
-        u'certain tags, e.g., script. Only one of body or automaticColumns '
-        u'can be specified.',
-        flag_values=fv)
-    flags.DEFINE_string(
-        'kind',
-        u'fusiontables#template',
-        u'Type name: a template for the info window contents. The template '
-        u'can either include an HTML body or a list of columns from which the'
-        u' template is computed automatically.',
-        flag_values=fv)
-    flags.DEFINE_string(
-        'name',
-        None,
-        u'Optional name assigned to a template.',
-        flag_values=fv)
-
-  def RunWithArgs(self, tableId, templateId):
-    """Updates an existing template
-
-    Args:
-      tableId: Identifier for the table for which the template is defined.
-      templateId: Identifier for the template, unique within the context of a
-        particular table.
-
-    Flags:
-      automaticColumnNames: List of columns from which the template is to be
-        automatically constructed. Only one of body or automaticColumns can be
-        specified.
-      body: Body of the template. It contains HTML with {column_name} to
-        insert values from a particular column. The body is sanitized to
-        remove certain tags, e.g., script. Only one of body or
-        automaticColumns can be specified.
-      kind: Type name: a template for the info window contents. The template
-        can either include an HTML body or a list of columns from which the
-        template is computed automatically.
-      name: Optional name assigned to a template.
-    """
-    client = GetClientFromFlags()
-    global_params = GetGlobalParamsFromFlags()
-    request = messages.Template(
-        tableId=tableId.decode('utf8'),
-        templateId=templateId,
-        )
-    if FLAGS['automaticColumnNames'].present:
-      request.automaticColumnNames = [x.decode('utf8') for x in FLAGS.automaticColumnNames]
-    if FLAGS['body'].present:
-      request.body = FLAGS.body.decode('utf8')
-    if FLAGS['kind'].present:
-      request.kind = FLAGS.kind.decode('utf8')
-    if FLAGS['name'].present:
-      request.name = FLAGS.name.decode('utf8')
-    result = client.template.Update(
-        request, global_params=global_params)
-    print apitools_base_cli.FormatOutput(result)
-
-
-def main(_):
-  appcommands.AddCmd('pyshell', PyShell)
-  appcommands.AddCmd('column_delete', ColumnDelete)
-  appcommands.AddCmd('column_get', ColumnGet)
-  appcommands.AddCmd('column_insert', ColumnInsert)
-  appcommands.AddCmd('column_list', ColumnList)
-  appcommands.AddCmd('column_patch', ColumnPatch)
-  appcommands.AddCmd('column_update', ColumnUpdate)
-  appcommands.AddCmd('query_sql', QuerySql)
-  appcommands.AddCmd('query_sqlGet', QuerySqlGet)
-  appcommands.AddCmd('style_delete', StyleDelete)
-  appcommands.AddCmd('style_get', StyleGet)
-  appcommands.AddCmd('style_insert', StyleInsert)
-  appcommands.AddCmd('style_list', StyleList)
-  appcommands.AddCmd('style_patch', StylePatch)
-  appcommands.AddCmd('style_update', StyleUpdate)
-  appcommands.AddCmd('table_copy', TableCopy)
-  appcommands.AddCmd('table_delete', TableDelete)
-  appcommands.AddCmd('table_get', TableGet)
-  appcommands.AddCmd('table_importRows', TableImportRows)
-  appcommands.AddCmd('table_importTable', TableImportTable)
-  appcommands.AddCmd('table_insert', TableInsert)
-  appcommands.AddCmd('table_list', TableList)
-  appcommands.AddCmd('table_patch', TablePatch)
-  appcommands.AddCmd('table_update', TableUpdate)
-  appcommands.AddCmd('task_delete', TaskDelete)
-  appcommands.AddCmd('task_get', TaskGet)
-  appcommands.AddCmd('task_list', TaskList)
-  appcommands.AddCmd('template_delete', TemplateDelete)
-  appcommands.AddCmd('template_get', TemplateGet)
-  appcommands.AddCmd('template_insert', TemplateInsert)
-  appcommands.AddCmd('template_list', TemplateList)
-  appcommands.AddCmd('template_patch', TemplatePatch)
-  appcommands.AddCmd('template_update', TemplateUpdate)
-
-  apitools_base_cli.SetupLogger()
-  if hasattr(appcommands, 'SetDefaultCommand'):
-    appcommands.SetDefaultCommand('pyshell')
-
-
-run_main = apitools_base_cli.run_main
-
-if __name__ == '__main__':
-  appcommands.Run()
diff --git a/samples/fusiontables_sample/fusiontables_v1/fusiontables_v1_client.py b/samples/fusiontables_sample/fusiontables_v1/fusiontables_v1_client.py
index 3376aa3..f80fb3e 100644
--- a/samples/fusiontables_sample/fusiontables_v1/fusiontables_v1_client.py
+++ b/samples/fusiontables_sample/fusiontables_v1/fusiontables_v1_client.py
@@ -24,7 +24,7 @@
                get_credentials=True, http=None, model=None,
                log_request=False, log_response=False,
                credentials_args=None, default_global_params=None,
-               additional_http_headers=None):
+               additional_http_headers=None, response_encoding=None):
     """Create a new fusiontables handle."""
     url = url or self.BASE_URL
     super(FusiontablesV1, self).__init__(
@@ -33,7 +33,8 @@
         log_request=log_request, log_response=log_response,
         credentials_args=credentials_args,
         default_global_params=default_global_params,
-        additional_http_headers=additional_http_headers)
+        additional_http_headers=additional_http_headers,
+        response_encoding=response_encoding)
     self.column = self.ColumnService(self)
     self.query = self.QueryService(self)
     self.style = self.StyleService(self)
@@ -52,7 +53,7 @@
           }
 
     def Delete(self, request, global_params=None):
-      """Deletes the column.
+      r"""Deletes the column.
 
       Args:
         request: (FusiontablesColumnDeleteRequest) input message
@@ -78,7 +79,7 @@
     )
 
     def Get(self, request, global_params=None):
-      """Retrieves a specific column by its id.
+      r"""Retrieves a specific column by its id.
 
       Args:
         request: (FusiontablesColumnGetRequest) input message
@@ -104,7 +105,7 @@
     )
 
     def Insert(self, request, global_params=None):
-      """Adds a new column to the table.
+      r"""Adds a new column to the table.
 
       Args:
         request: (FusiontablesColumnInsertRequest) input message
@@ -130,7 +131,7 @@
     )
 
     def List(self, request, global_params=None):
-      """Retrieves a list of columns.
+      r"""Retrieves a list of columns.
 
       Args:
         request: (FusiontablesColumnListRequest) input message
@@ -156,7 +157,7 @@
     )
 
     def Patch(self, request, global_params=None):
-      """Updates the name or type of an existing column. This method supports patch semantics.
+      r"""Updates the name or type of an existing column. This method supports patch semantics.
 
       Args:
         request: (FusiontablesColumnPatchRequest) input message
@@ -182,7 +183,7 @@
     )
 
     def Update(self, request, global_params=None):
-      """Updates the name or type of an existing column.
+      r"""Updates the name or type of an existing column.
 
       Args:
         request: (FusiontablesColumnUpdateRequest) input message
@@ -218,7 +219,7 @@
           }
 
     def Sql(self, request, global_params=None, download=None):
-      """Executes an SQL SELECT/INSERT/UPDATE/DELETE/SHOW/DESCRIBE/CREATE statement.
+      r"""Executes an SQL SELECT/INSERT/UPDATE/DELETE/SHOW/DESCRIBE/CREATE statement.
 
       Args:
         request: (FusiontablesQuerySqlRequest) input message
@@ -247,7 +248,7 @@
     )
 
     def SqlGet(self, request, global_params=None, download=None):
-      """Executes an SQL SELECT/SHOW/DESCRIBE statement.
+      r"""Executes an SQL SELECT/SHOW/DESCRIBE statement.
 
       Args:
         request: (FusiontablesQuerySqlGetRequest) input message
@@ -286,7 +287,7 @@
           }
 
     def Delete(self, request, global_params=None):
-      """Deletes a style.
+      r"""Deletes a style.
 
       Args:
         request: (FusiontablesStyleDeleteRequest) input message
@@ -312,7 +313,7 @@
     )
 
     def Get(self, request, global_params=None):
-      """Gets a specific style.
+      r"""Gets a specific style.
 
       Args:
         request: (FusiontablesStyleGetRequest) input message
@@ -338,7 +339,7 @@
     )
 
     def Insert(self, request, global_params=None):
-      """Adds a new style for the table.
+      r"""Adds a new style for the table.
 
       Args:
         request: (StyleSetting) input message
@@ -364,7 +365,7 @@
     )
 
     def List(self, request, global_params=None):
-      """Retrieves a list of styles.
+      r"""Retrieves a list of styles.
 
       Args:
         request: (FusiontablesStyleListRequest) input message
@@ -390,7 +391,7 @@
     )
 
     def Patch(self, request, global_params=None):
-      """Updates an existing style. This method supports patch semantics.
+      r"""Updates an existing style. This method supports patch semantics.
 
       Args:
         request: (StyleSetting) input message
@@ -416,7 +417,7 @@
     )
 
     def Update(self, request, global_params=None):
-      """Updates an existing style.
+      r"""Updates an existing style.
 
       Args:
         request: (StyleSetting) input message
@@ -468,7 +469,7 @@
           }
 
     def Copy(self, request, global_params=None):
-      """Copies a table.
+      r"""Copies a table.
 
       Args:
         request: (FusiontablesTableCopyRequest) input message
@@ -494,7 +495,7 @@
     )
 
     def Delete(self, request, global_params=None):
-      """Deletes a table.
+      r"""Deletes a table.
 
       Args:
         request: (FusiontablesTableDeleteRequest) input message
@@ -520,7 +521,7 @@
     )
 
     def Get(self, request, global_params=None):
-      """Retrieves a specific table by its id.
+      r"""Retrieves a specific table by its id.
 
       Args:
         request: (FusiontablesTableGetRequest) input message
@@ -546,7 +547,7 @@
     )
 
     def ImportRows(self, request, global_params=None, upload=None):
-      """Import more rows into a table.
+      r"""Import more rows into a table.
 
       Args:
         request: (FusiontablesTableImportRowsRequest) input message
@@ -576,7 +577,7 @@
     )
 
     def ImportTable(self, request, global_params=None, upload=None):
-      """Import a new table.
+      r"""Import a new table.
 
       Args:
         request: (FusiontablesTableImportTableRequest) input message
@@ -606,7 +607,7 @@
     )
 
     def Insert(self, request, global_params=None):
-      """Creates a new table.
+      r"""Creates a new table.
 
       Args:
         request: (Table) input message
@@ -632,7 +633,7 @@
     )
 
     def List(self, request, global_params=None):
-      """Retrieves a list of tables a user owns.
+      r"""Retrieves a list of tables a user owns.
 
       Args:
         request: (FusiontablesTableListRequest) input message
@@ -658,7 +659,7 @@
     )
 
     def Patch(self, request, global_params=None):
-      """Updates an existing table. Unless explicitly requested, only the name, description, and attribution will be updated. This method supports patch semantics.
+      r"""Updates an existing table. Unless explicitly requested, only the name, description, and attribution will be updated. This method supports patch semantics.
 
       Args:
         request: (FusiontablesTablePatchRequest) input message
@@ -684,7 +685,7 @@
     )
 
     def Update(self, request, global_params=None):
-      """Updates an existing table. Unless explicitly requested, only the name, description, and attribution will be updated.
+      r"""Updates an existing table. Unless explicitly requested, only the name, description, and attribution will be updated.
 
       Args:
         request: (FusiontablesTableUpdateRequest) input message
@@ -720,7 +721,7 @@
           }
 
     def Delete(self, request, global_params=None):
-      """Deletes the task, unless already started.
+      r"""Deletes the task, unless already started.
 
       Args:
         request: (FusiontablesTaskDeleteRequest) input message
@@ -746,7 +747,7 @@
     )
 
     def Get(self, request, global_params=None):
-      """Retrieves a specific task by its id.
+      r"""Retrieves a specific task by its id.
 
       Args:
         request: (FusiontablesTaskGetRequest) input message
@@ -772,7 +773,7 @@
     )
 
     def List(self, request, global_params=None):
-      """Retrieves a list of tasks.
+      r"""Retrieves a list of tasks.
 
       Args:
         request: (FusiontablesTaskListRequest) input message
@@ -808,7 +809,7 @@
           }
 
     def Delete(self, request, global_params=None):
-      """Deletes a template.
+      r"""Deletes a template.
 
       Args:
         request: (FusiontablesTemplateDeleteRequest) input message
@@ -834,7 +835,7 @@
     )
 
     def Get(self, request, global_params=None):
-      """Retrieves a specific template by its id.
+      r"""Retrieves a specific template by its id.
 
       Args:
         request: (FusiontablesTemplateGetRequest) input message
@@ -860,7 +861,7 @@
     )
 
     def Insert(self, request, global_params=None):
-      """Creates a new template for the table.
+      r"""Creates a new template for the table.
 
       Args:
         request: (Template) input message
@@ -886,7 +887,7 @@
     )
 
     def List(self, request, global_params=None):
-      """Retrieves a list of templates.
+      r"""Retrieves a list of templates.
 
       Args:
         request: (FusiontablesTemplateListRequest) input message
@@ -912,7 +913,7 @@
     )
 
     def Patch(self, request, global_params=None):
-      """Updates an existing template. This method supports patch semantics.
+      r"""Updates an existing template. This method supports patch semantics.
 
       Args:
         request: (Template) input message
@@ -938,7 +939,7 @@
     )
 
     def Update(self, request, global_params=None):
-      """Updates an existing template.
+      r"""Updates an existing template.
 
       Args:
         request: (Template) input message
diff --git a/samples/fusiontables_sample/fusiontables_v1/fusiontables_v1_messages.py b/samples/fusiontables_sample/fusiontables_v1/fusiontables_v1_messages.py
index 15f878e..69f2cfb 100644
--- a/samples/fusiontables_sample/fusiontables_v1/fusiontables_v1_messages.py
+++ b/samples/fusiontables_sample/fusiontables_v1/fusiontables_v1_messages.py
@@ -12,7 +12,7 @@
 
 
 class Bucket(_messages.Message):
-  """Specifies the minimum and maximum values, the color, opacity, icon and
+  r"""Specifies the minimum and maximum values, the color, opacity, icon and
   weight of a bucket within a StyleSetting.
 
   Fields:
@@ -35,7 +35,7 @@
 
 
 class Column(_messages.Message):
-  """Specifies the id, name and type of a column in a table.
+  r"""Specifies the id, name and type of a column in a table.
 
   Messages:
     BaseColumnValue: Optional identifier of the base column. If present, this
@@ -55,7 +55,7 @@
   """
 
   class BaseColumnValue(_messages.Message):
-    """Optional identifier of the base column. If present, this column is
+    r"""Optional identifier of the base column. If present, this column is
     derived from the specified base column.
 
     Fields:
@@ -78,7 +78,7 @@
 
 
 class ColumnList(_messages.Message):
-  """Represents a list of columns in a table.
+  r"""Represents a list of columns in a table.
 
   Fields:
     items: List of all requested columns.
@@ -95,7 +95,7 @@
 
 
 class FusiontablesColumnDeleteRequest(_messages.Message):
-  """A FusiontablesColumnDeleteRequest object.
+  r"""A FusiontablesColumnDeleteRequest object.
 
   Fields:
     columnId: Name or identifier for the column being deleted.
@@ -107,11 +107,11 @@
 
 
 class FusiontablesColumnDeleteResponse(_messages.Message):
-  """An empty FusiontablesColumnDelete response."""
+  r"""An empty FusiontablesColumnDelete response."""
 
 
 class FusiontablesColumnGetRequest(_messages.Message):
-  """A FusiontablesColumnGetRequest object.
+  r"""A FusiontablesColumnGetRequest object.
 
   Fields:
     columnId: Name or identifier for the column that is being requested.
@@ -123,7 +123,7 @@
 
 
 class FusiontablesColumnInsertRequest(_messages.Message):
-  """A FusiontablesColumnInsertRequest object.
+  r"""A FusiontablesColumnInsertRequest object.
 
   Fields:
     column: A Column resource to be passed as the request body.
@@ -135,7 +135,7 @@
 
 
 class FusiontablesColumnListRequest(_messages.Message):
-  """A FusiontablesColumnListRequest object.
+  r"""A FusiontablesColumnListRequest object.
 
   Fields:
     maxResults: Maximum number of columns to return. Optional. Default is 5.
@@ -150,7 +150,7 @@
 
 
 class FusiontablesColumnPatchRequest(_messages.Message):
-  """A FusiontablesColumnPatchRequest object.
+  r"""A FusiontablesColumnPatchRequest object.
 
   Fields:
     column: A Column resource to be passed as the request body.
@@ -164,7 +164,7 @@
 
 
 class FusiontablesColumnUpdateRequest(_messages.Message):
-  """A FusiontablesColumnUpdateRequest object.
+  r"""A FusiontablesColumnUpdateRequest object.
 
   Fields:
     column: A Column resource to be passed as the request body.
@@ -178,7 +178,7 @@
 
 
 class FusiontablesQuerySqlGetRequest(_messages.Message):
-  """A FusiontablesQuerySqlGetRequest object.
+  r"""A FusiontablesQuerySqlGetRequest object.
 
   Fields:
     hdrs: Should column names be included (in the first row)?. Default is
@@ -195,7 +195,7 @@
 
 
 class FusiontablesQuerySqlRequest(_messages.Message):
-  """A FusiontablesQuerySqlRequest object.
+  r"""A FusiontablesQuerySqlRequest object.
 
   Fields:
     hdrs: Should column names be included (in the first row)?. Default is
@@ -212,7 +212,7 @@
 
 
 class FusiontablesStyleDeleteRequest(_messages.Message):
-  """A FusiontablesStyleDeleteRequest object.
+  r"""A FusiontablesStyleDeleteRequest object.
 
   Fields:
     styleId: Identifier (within a table) for the style being deleted
@@ -224,11 +224,11 @@
 
 
 class FusiontablesStyleDeleteResponse(_messages.Message):
-  """An empty FusiontablesStyleDelete response."""
+  r"""An empty FusiontablesStyleDelete response."""
 
 
 class FusiontablesStyleGetRequest(_messages.Message):
-  """A FusiontablesStyleGetRequest object.
+  r"""A FusiontablesStyleGetRequest object.
 
   Fields:
     styleId: Identifier (integer) for a specific style in a table
@@ -240,7 +240,7 @@
 
 
 class FusiontablesStyleListRequest(_messages.Message):
-  """A FusiontablesStyleListRequest object.
+  r"""A FusiontablesStyleListRequest object.
 
   Fields:
     maxResults: Maximum number of styles to return. Optional. Default is 5.
@@ -255,7 +255,7 @@
 
 
 class FusiontablesTableCopyRequest(_messages.Message):
-  """A FusiontablesTableCopyRequest object.
+  r"""A FusiontablesTableCopyRequest object.
 
   Fields:
     copyPresentation: Whether to also copy tabs, styles, and templates.
@@ -268,7 +268,7 @@
 
 
 class FusiontablesTableDeleteRequest(_messages.Message):
-  """A FusiontablesTableDeleteRequest object.
+  r"""A FusiontablesTableDeleteRequest object.
 
   Fields:
     tableId: ID of the table that is being deleted.
@@ -278,11 +278,11 @@
 
 
 class FusiontablesTableDeleteResponse(_messages.Message):
-  """An empty FusiontablesTableDelete response."""
+  r"""An empty FusiontablesTableDelete response."""
 
 
 class FusiontablesTableGetRequest(_messages.Message):
-  """A FusiontablesTableGetRequest object.
+  r"""A FusiontablesTableGetRequest object.
 
   Fields:
     tableId: Identifier(ID) for the table being requested.
@@ -292,7 +292,7 @@
 
 
 class FusiontablesTableImportRowsRequest(_messages.Message):
-  """A FusiontablesTableImportRowsRequest object.
+  r"""A FusiontablesTableImportRowsRequest object.
 
   Fields:
     delimiter: The delimiter used to separate cell values. This can only
@@ -324,7 +324,7 @@
 
 
 class FusiontablesTableImportTableRequest(_messages.Message):
-  """A FusiontablesTableImportTableRequest object.
+  r"""A FusiontablesTableImportTableRequest object.
 
   Fields:
     delimiter: The delimiter used to separate cell values. This can only
@@ -340,7 +340,7 @@
 
 
 class FusiontablesTableListRequest(_messages.Message):
-  """A FusiontablesTableListRequest object.
+  r"""A FusiontablesTableListRequest object.
 
   Fields:
     maxResults: Maximum number of styles to return. Optional. Default is 5.
@@ -353,7 +353,7 @@
 
 
 class FusiontablesTablePatchRequest(_messages.Message):
-  """A FusiontablesTablePatchRequest object.
+  r"""A FusiontablesTablePatchRequest object.
 
   Fields:
     replaceViewDefinition: Should the view definition also be updated? The
@@ -369,7 +369,7 @@
 
 
 class FusiontablesTableUpdateRequest(_messages.Message):
-  """A FusiontablesTableUpdateRequest object.
+  r"""A FusiontablesTableUpdateRequest object.
 
   Fields:
     replaceViewDefinition: Should the view definition also be updated? The
@@ -385,7 +385,7 @@
 
 
 class FusiontablesTaskDeleteRequest(_messages.Message):
-  """A FusiontablesTaskDeleteRequest object.
+  r"""A FusiontablesTaskDeleteRequest object.
 
   Fields:
     tableId: Table from which the task is being deleted.
@@ -397,11 +397,11 @@
 
 
 class FusiontablesTaskDeleteResponse(_messages.Message):
-  """An empty FusiontablesTaskDelete response."""
+  r"""An empty FusiontablesTaskDelete response."""
 
 
 class FusiontablesTaskGetRequest(_messages.Message):
-  """A FusiontablesTaskGetRequest object.
+  r"""A FusiontablesTaskGetRequest object.
 
   Fields:
     tableId: Table to which the task belongs.
@@ -413,7 +413,7 @@
 
 
 class FusiontablesTaskListRequest(_messages.Message):
-  """A FusiontablesTaskListRequest object.
+  r"""A FusiontablesTaskListRequest object.
 
   Fields:
     maxResults: Maximum number of columns to return. Optional. Default is 5.
@@ -429,7 +429,7 @@
 
 
 class FusiontablesTemplateDeleteRequest(_messages.Message):
-  """A FusiontablesTemplateDeleteRequest object.
+  r"""A FusiontablesTemplateDeleteRequest object.
 
   Fields:
     tableId: Table from which the template is being deleted
@@ -441,11 +441,11 @@
 
 
 class FusiontablesTemplateDeleteResponse(_messages.Message):
-  """An empty FusiontablesTemplateDelete response."""
+  r"""An empty FusiontablesTemplateDelete response."""
 
 
 class FusiontablesTemplateGetRequest(_messages.Message):
-  """A FusiontablesTemplateGetRequest object.
+  r"""A FusiontablesTemplateGetRequest object.
 
   Fields:
     tableId: Table to which the template belongs
@@ -457,7 +457,7 @@
 
 
 class FusiontablesTemplateListRequest(_messages.Message):
-  """A FusiontablesTemplateListRequest object.
+  r"""A FusiontablesTemplateListRequest object.
 
   Fields:
     maxResults: Maximum number of templates to return. Optional. Default is 5.
@@ -472,7 +472,7 @@
 
 
 class Geometry(_messages.Message):
-  """Represents a Geometry object.
+  r"""Represents a Geometry object.
 
   Fields:
     geometries: The list of geometries in this geometry collection.
@@ -486,7 +486,7 @@
 
 
 class Import(_messages.Message):
-  """Represents an import request.
+  r"""Represents an import request.
 
   Fields:
     kind: Type name: a template for an import request.
@@ -498,7 +498,7 @@
 
 
 class Line(_messages.Message):
-  """Represents a line geometry.
+  r"""Represents a line geometry.
 
   Messages:
     CoordinatesValueListEntry: Single entry in a CoordinatesValue.
@@ -509,7 +509,7 @@
   """
 
   class CoordinatesValueListEntry(_messages.Message):
-    """Single entry in a CoordinatesValue.
+    r"""Single entry in a CoordinatesValue.
 
     Fields:
       entry: A number attribute.
@@ -522,7 +522,7 @@
 
 
 class LineStyle(_messages.Message):
-  """Represents a LineStyle within a StyleSetting
+  r"""Represents a LineStyle within a StyleSetting
 
   Fields:
     strokeColor: Color of the line in #RRGGBB format.
@@ -542,7 +542,7 @@
 
 
 class Point(_messages.Message):
-  """Represents a point object.
+  r"""Represents a point object.
 
   Fields:
     coordinates: The coordinates that define the point.
@@ -554,7 +554,7 @@
 
 
 class PointStyle(_messages.Message):
-  """Represents a PointStyle within a StyleSetting
+  r"""Represents a PointStyle within a StyleSetting
 
   Fields:
     iconName: Name of the icon. Use values defined in
@@ -568,7 +568,7 @@
 
 
 class Polygon(_messages.Message):
-  """Represents a polygon object.
+  r"""Represents a polygon object.
 
   Messages:
     CoordinatesValueListEntry: Single entry in a CoordinatesValue.
@@ -579,7 +579,7 @@
   """
 
   class CoordinatesValueListEntry(_messages.Message):
-    """Single entry in a CoordinatesValue.
+    r"""Single entry in a CoordinatesValue.
 
     Messages:
       EntryValueListEntry: Single entry in a EntryValue.
@@ -589,7 +589,7 @@
     """
 
     class EntryValueListEntry(_messages.Message):
-      """Single entry in a EntryValue.
+      r"""Single entry in a EntryValue.
 
       Fields:
         entry: A number attribute.
@@ -604,7 +604,7 @@
 
 
 class PolygonStyle(_messages.Message):
-  """Represents a PolygonStyle within a StyleSetting
+  r"""Represents a PolygonStyle within a StyleSetting
 
   Fields:
     fillColor: Color of the interior of the polygon in #RRGGBB format.
@@ -633,7 +633,7 @@
 
 
 class Sqlresponse(_messages.Message):
-  """Represents a response to an sql statement.
+  r"""Represents a response to an sql statement.
 
   Messages:
     RowsValueListEntry: Single entry in a RowsValue.
@@ -647,7 +647,7 @@
   """
 
   class RowsValueListEntry(_messages.Message):
-    """Single entry in a RowsValue.
+    r"""Single entry in a RowsValue.
 
     Fields:
       entry: A extra_types.JsonValue attribute.
@@ -661,7 +661,7 @@
 
 
 class StandardQueryParameters(_messages.Message):
-  """Query parameters accepted by all methods.
+  r"""Query parameters accepted by all methods.
 
   Enums:
     AltValueValuesEnum: Data format for the response.
@@ -684,7 +684,7 @@
   """
 
   class AltValueValuesEnum(_messages.Enum):
-    """Data format for the response.
+    r"""Data format for the response.
 
     Values:
       csv: Responses with Content-Type of text/csv
@@ -704,7 +704,7 @@
 
 
 class StyleFunction(_messages.Message):
-  """Represents a StyleFunction within a StyleSetting
+  r"""Represents a StyleFunction within a StyleSetting
 
   Messages:
     GradientValue: Gradient function that interpolates a range of colors based
@@ -726,7 +726,7 @@
   """
 
   class GradientValue(_messages.Message):
-    """Gradient function that interpolates a range of colors based on column
+    r"""Gradient function that interpolates a range of colors based on column
     value.
 
     Messages:
@@ -741,7 +741,7 @@
     """
 
     class ColorsValueListEntry(_messages.Message):
-      """A ColorsValueListEntry object.
+      r"""A ColorsValueListEntry object.
 
       Fields:
         color: Color in #RRGGBB format.
@@ -762,7 +762,7 @@
 
 
 class StyleSetting(_messages.Message):
-  """Represents a complete StyleSettings object. The primary key is a
+  r"""Represents a complete StyleSettings object. The primary key is a
   combination of the tableId and a styleId.
 
   Fields:
@@ -788,7 +788,7 @@
 
 
 class StyleSettingList(_messages.Message):
-  """Represents a list of styles for a given table.
+  r"""Represents a list of styles for a given table.
 
   Fields:
     items: All requested style settings.
@@ -805,7 +805,7 @@
 
 
 class Table(_messages.Message):
-  """Represents a table. Specifies the name, whether it is exportable,
+  r"""Represents a table. Specifies the name, whether it is exportable,
   description, attribution, and attribution link.
 
   Fields:
@@ -835,7 +835,7 @@
 
 
 class TableList(_messages.Message):
-  """Represents a list of tables.
+  r"""Represents a list of tables.
 
   Fields:
     items: List of all requested tables.
@@ -850,7 +850,7 @@
 
 
 class Task(_messages.Message):
-  """Specifies the identifier, name, and type of a task in a table.
+  r"""Specifies the identifier, name, and type of a task in a table.
 
   Fields:
     kind: Type of the resource. This is always "fusiontables#task".
@@ -873,7 +873,7 @@
 
 
 class TaskList(_messages.Message):
-  """Represents a list of tasks for a table.
+  r"""Represents a list of tasks for a table.
 
   Fields:
     items: List of all requested tasks.
@@ -890,7 +890,7 @@
 
 
 class Template(_messages.Message):
-  """Represents the contents of InfoWindow templates.
+  r"""Represents the contents of InfoWindow templates.
 
   Fields:
     automaticColumnNames: List of columns from which the template is to be
@@ -918,7 +918,7 @@
 
 
 class TemplateList(_messages.Message):
-  """Represents a list of templates for a given table.
+  r"""Represents a list of templates for a given table.
 
   Fields:
     items: List of all requested templates.
diff --git a/samples/iam_sample/iam_v1/iam_v1.py b/samples/iam_sample/iam_v1/iam_v1.py
deleted file mode 100644
index da9750e..0000000
--- a/samples/iam_sample/iam_v1/iam_v1.py
+++ /dev/null
@@ -1,921 +0,0 @@
-#!/usr/bin/env python
-"""CLI for iam, version v1."""
-# NOTE: This file is autogenerated and should not be edited by hand.
-
-import code
-import os
-import platform
-import sys
-
-from apitools.base.protorpclite import message_types
-from apitools.base.protorpclite import messages
-
-from google.apputils import appcommands
-import gflags as flags
-
-import apitools.base.py as apitools_base
-from apitools.base.py import cli as apitools_base_cli
-import iam_v1_client as client_lib
-import iam_v1_messages as messages
-
-
-def _DeclareIamFlags():
-  """Declare global flags in an idempotent way."""
-  if 'api_endpoint' in flags.FLAGS:
-    return
-  flags.DEFINE_string(
-      'api_endpoint',
-      u'https://iam.googleapis.com/',
-      'URL of the API endpoint to use.',
-      short_name='iam_url')
-  flags.DEFINE_string(
-      'history_file',
-      u'~/.iam.v1.history',
-      'File with interactive shell history.')
-  flags.DEFINE_multistring(
-      'add_header', [],
-      'Additional http headers (as key=value strings). '
-      'Can be specified multiple times.')
-  flags.DEFINE_string(
-      'service_account_json_keyfile', '',
-      'Filename for a JSON service account key downloaded'
-      ' from the Developer Console.')
-  flags.DEFINE_enum(
-      'f__xgafv',
-      u'_1',
-      [u'_1', u'_2'],
-      u'V1 error format.')
-  flags.DEFINE_string(
-      'access_token',
-      None,
-      u'OAuth access token.')
-  flags.DEFINE_enum(
-      'alt',
-      u'json',
-      [u'json', u'media', u'proto'],
-      u'Data format for response.')
-  flags.DEFINE_string(
-      'bearer_token',
-      None,
-      u'OAuth bearer token.')
-  flags.DEFINE_string(
-      'callback',
-      None,
-      u'JSONP')
-  flags.DEFINE_string(
-      'fields',
-      None,
-      u'Selector specifying which fields to include in a partial response.')
-  flags.DEFINE_string(
-      'key',
-      None,
-      u'API key. Your API key identifies your project and provides you with '
-      u'API access, quota, and reports. Required unless you provide an OAuth '
-      u'2.0 token.')
-  flags.DEFINE_string(
-      'oauth_token',
-      None,
-      u'OAuth 2.0 token for the current user.')
-  flags.DEFINE_boolean(
-      'pp',
-      'True',
-      u'Pretty-print response.')
-  flags.DEFINE_boolean(
-      'prettyPrint',
-      'True',
-      u'Returns response with indentations and line breaks.')
-  flags.DEFINE_string(
-      'quotaUser',
-      None,
-      u'Available to use for quota purposes for server-side applications. Can'
-      u' be any arbitrary string assigned to a user, but should not exceed 40'
-      u' characters.')
-  flags.DEFINE_string(
-      'trace',
-      None,
-      'A tracing token of the form "token:<tokenid>" to include in api '
-      'requests.')
-  flags.DEFINE_string(
-      'uploadType',
-      None,
-      u'Legacy upload protocol for media (e.g. "media", "multipart").')
-  flags.DEFINE_string(
-      'upload_protocol',
-      None,
-      u'Upload protocol for media (e.g. "raw", "multipart").')
-
-
-FLAGS = flags.FLAGS
-apitools_base_cli.DeclareBaseFlags()
-_DeclareIamFlags()
-
-
-def GetGlobalParamsFromFlags():
-  """Return a StandardQueryParameters based on flags."""
-  result = messages.StandardQueryParameters()
-  if FLAGS['f__xgafv'].present:
-    result.f__xgafv = messages.StandardQueryParameters.FXgafvValueValuesEnum(FLAGS.f__xgafv)
-  if FLAGS['access_token'].present:
-    result.access_token = FLAGS.access_token.decode('utf8')
-  if FLAGS['alt'].present:
-    result.alt = messages.StandardQueryParameters.AltValueValuesEnum(FLAGS.alt)
-  if FLAGS['bearer_token'].present:
-    result.bearer_token = FLAGS.bearer_token.decode('utf8')
-  if FLAGS['callback'].present:
-    result.callback = FLAGS.callback.decode('utf8')
-  if FLAGS['fields'].present:
-    result.fields = FLAGS.fields.decode('utf8')
-  if FLAGS['key'].present:
-    result.key = FLAGS.key.decode('utf8')
-  if FLAGS['oauth_token'].present:
-    result.oauth_token = FLAGS.oauth_token.decode('utf8')
-  if FLAGS['pp'].present:
-    result.pp = FLAGS.pp
-  if FLAGS['prettyPrint'].present:
-    result.prettyPrint = FLAGS.prettyPrint
-  if FLAGS['quotaUser'].present:
-    result.quotaUser = FLAGS.quotaUser.decode('utf8')
-  if FLAGS['trace'].present:
-    result.trace = FLAGS.trace.decode('utf8')
-  if FLAGS['uploadType'].present:
-    result.uploadType = FLAGS.uploadType.decode('utf8')
-  if FLAGS['upload_protocol'].present:
-    result.upload_protocol = FLAGS.upload_protocol.decode('utf8')
-  return result
-
-
-def GetClientFromFlags():
-  """Return a client object, configured from flags."""
-  log_request = FLAGS.log_request or FLAGS.log_request_response
-  log_response = FLAGS.log_response or FLAGS.log_request_response
-  api_endpoint = apitools_base.NormalizeApiEndpoint(FLAGS.api_endpoint)
-  additional_http_headers = dict(x.split('=', 1) for x in FLAGS.add_header)
-  credentials_args = {
-      'service_account_json_keyfile': os.path.expanduser(FLAGS.service_account_json_keyfile)
-  }
-  try:
-    client = client_lib.IamV1(
-        api_endpoint, log_request=log_request,
-        log_response=log_response,
-        credentials_args=credentials_args,
-        additional_http_headers=additional_http_headers)
-  except apitools_base.CredentialsError as e:
-    print 'Error creating credentials: %s' % e
-    sys.exit(1)
-  return client
-
-
-class PyShell(appcommands.Cmd):
-
-  def Run(self, _):
-    """Run an interactive python shell with the client."""
-    client = GetClientFromFlags()
-    params = GetGlobalParamsFromFlags()
-    for field in params.all_fields():
-      value = params.get_assigned_value(field.name)
-      if value != field.default:
-        client.AddGlobalParam(field.name, value)
-    banner = """
-           == iam interactive console ==
-                 client: a iam client
-          apitools_base: base apitools module
-         messages: the generated messages module
-    """
-    local_vars = {
-        'apitools_base': apitools_base,
-        'client': client,
-        'client_lib': client_lib,
-        'messages': messages,
-    }
-    if platform.system() == 'Linux':
-      console = apitools_base_cli.ConsoleWithReadline(
-          local_vars, histfile=FLAGS.history_file)
-    else:
-      console = code.InteractiveConsole(local_vars)
-    try:
-      console.interact(banner)
-    except SystemExit as e:
-      return e.code
-
-
-class IamPoliciesGetPolicyDetails(apitools_base_cli.NewCmd):
-  """Command wrapping iamPolicies.GetPolicyDetails."""
-
-  usage = """iamPolicies_getPolicyDetails"""
-
-  def __init__(self, name, fv):
-    super(IamPoliciesGetPolicyDetails, self).__init__(name, fv)
-    flags.DEFINE_string(
-        'fullResourcePath',
-        None,
-        u'REQUIRED: The full resource path of the current policy being '
-        u'requested, e.g., `//dataflow.googleapis.com/projects/../jobs/..`.',
-        flag_values=fv)
-    flags.DEFINE_integer(
-        'pageSize',
-        None,
-        u'Limit on the number of policies to include in the response. Further'
-        u' accounts can subsequently be obtained by including the '
-        u'GetPolicyDetailsResponse.next_page_token in a subsequent request. '
-        u'If zero, the default page size 20 will be used. Must be given a '
-        u'value in range [0, 100], otherwise an invalid argument error will '
-        u'be returned.',
-        flag_values=fv)
-    flags.DEFINE_string(
-        'pageToken',
-        None,
-        u'Optional pagination token returned in an earlier '
-        u'GetPolicyDetailsResponse.next_page_token response.',
-        flag_values=fv)
-
-  def RunWithArgs(self):
-    """Returns the current IAM policy and the policies on the inherited
-    resources that the user has access to.
-
-    Flags:
-      fullResourcePath: REQUIRED: The full resource path of the current policy
-        being requested, e.g.,
-        `//dataflow.googleapis.com/projects/../jobs/..`.
-      pageSize: Limit on the number of policies to include in the response.
-        Further accounts can subsequently be obtained by including the
-        GetPolicyDetailsResponse.next_page_token in a subsequent request. If
-        zero, the default page size 20 will be used. Must be given a value in
-        range [0, 100], otherwise an invalid argument error will be returned.
-      pageToken: Optional pagination token returned in an earlier
-        GetPolicyDetailsResponse.next_page_token response.
-    """
-    client = GetClientFromFlags()
-    global_params = GetGlobalParamsFromFlags()
-    request = messages.GetPolicyDetailsRequest(
-        )
-    if FLAGS['fullResourcePath'].present:
-      request.fullResourcePath = FLAGS.fullResourcePath.decode('utf8')
-    if FLAGS['pageSize'].present:
-      request.pageSize = FLAGS.pageSize
-    if FLAGS['pageToken'].present:
-      request.pageToken = FLAGS.pageToken.decode('utf8')
-    result = client.iamPolicies.GetPolicyDetails(
-        request, global_params=global_params)
-    print apitools_base_cli.FormatOutput(result)
-
-
-class ProjectsServiceAccountsCreate(apitools_base_cli.NewCmd):
-  """Command wrapping projects_serviceAccounts.Create."""
-
-  usage = """projects_serviceAccounts_create <name>"""
-
-  def __init__(self, name, fv):
-    super(ProjectsServiceAccountsCreate, self).__init__(name, fv)
-    flags.DEFINE_string(
-        'createServiceAccountRequest',
-        None,
-        u'A CreateServiceAccountRequest resource to be passed as the request '
-        u'body.',
-        flag_values=fv)
-
-  def RunWithArgs(self, name):
-    """Creates a ServiceAccount and returns it.
-
-    Args:
-      name: Required. The resource name of the project associated with the
-        service accounts, such as `projects/my-project-123`.
-
-    Flags:
-      createServiceAccountRequest: A CreateServiceAccountRequest resource to
-        be passed as the request body.
-    """
-    client = GetClientFromFlags()
-    global_params = GetGlobalParamsFromFlags()
-    request = messages.IamProjectsServiceAccountsCreateRequest(
-        name=name.decode('utf8'),
-        )
-    if FLAGS['createServiceAccountRequest'].present:
-      request.createServiceAccountRequest = apitools_base.JsonToMessage(messages.CreateServiceAccountRequest, FLAGS.createServiceAccountRequest)
-    result = client.projects_serviceAccounts.Create(
-        request, global_params=global_params)
-    print apitools_base_cli.FormatOutput(result)
-
-
-class ProjectsServiceAccountsDelete(apitools_base_cli.NewCmd):
-  """Command wrapping projects_serviceAccounts.Delete."""
-
-  usage = """projects_serviceAccounts_delete <name>"""
-
-  def __init__(self, name, fv):
-    super(ProjectsServiceAccountsDelete, self).__init__(name, fv)
-
-  def RunWithArgs(self, name):
-    """Deletes a ServiceAccount.
-
-    Args:
-      name: The resource name of the service account in the following format:
-        `projects/{project}/serviceAccounts/{account}`. Using `-` as a
-        wildcard for the project will infer the project from the account. The
-        `account` value can be the `email` address or the `unique_id` of the
-        service account.
-    """
-    client = GetClientFromFlags()
-    global_params = GetGlobalParamsFromFlags()
-    request = messages.IamProjectsServiceAccountsDeleteRequest(
-        name=name.decode('utf8'),
-        )
-    result = client.projects_serviceAccounts.Delete(
-        request, global_params=global_params)
-    print apitools_base_cli.FormatOutput(result)
-
-
-class ProjectsServiceAccountsGet(apitools_base_cli.NewCmd):
-  """Command wrapping projects_serviceAccounts.Get."""
-
-  usage = """projects_serviceAccounts_get <name>"""
-
-  def __init__(self, name, fv):
-    super(ProjectsServiceAccountsGet, self).__init__(name, fv)
-
-  def RunWithArgs(self, name):
-    """Gets a ServiceAccount.
-
-    Args:
-      name: The resource name of the service account in the following format:
-        `projects/{project}/serviceAccounts/{account}`. Using `-` as a
-        wildcard for the project will infer the project from the account. The
-        `account` value can be the `email` address or the `unique_id` of the
-        service account.
-    """
-    client = GetClientFromFlags()
-    global_params = GetGlobalParamsFromFlags()
-    request = messages.IamProjectsServiceAccountsGetRequest(
-        name=name.decode('utf8'),
-        )
-    result = client.projects_serviceAccounts.Get(
-        request, global_params=global_params)
-    print apitools_base_cli.FormatOutput(result)
-
-
-class ProjectsServiceAccountsGetIamPolicy(apitools_base_cli.NewCmd):
-  """Command wrapping projects_serviceAccounts.GetIamPolicy."""
-
-  usage = """projects_serviceAccounts_getIamPolicy <resource>"""
-
-  def __init__(self, name, fv):
-    super(ProjectsServiceAccountsGetIamPolicy, self).__init__(name, fv)
-
-  def RunWithArgs(self, resource):
-    """Returns the IAM access control policy for specified IAM resource.
-
-    Args:
-      resource: REQUIRED: The resource for which the policy is being
-        requested. `resource` is usually specified as a path, such as
-        `projects/*project*/zones/*zone*/disks/*disk*`.  The format for the
-        path specified in this value is resource specific and is specified in
-        the `getIamPolicy` documentation.
-    """
-    client = GetClientFromFlags()
-    global_params = GetGlobalParamsFromFlags()
-    request = messages.IamProjectsServiceAccountsGetIamPolicyRequest(
-        resource=resource.decode('utf8'),
-        )
-    result = client.projects_serviceAccounts.GetIamPolicy(
-        request, global_params=global_params)
-    print apitools_base_cli.FormatOutput(result)
-
-
-class ProjectsServiceAccountsList(apitools_base_cli.NewCmd):
-  """Command wrapping projects_serviceAccounts.List."""
-
-  usage = """projects_serviceAccounts_list <name>"""
-
-  def __init__(self, name, fv):
-    super(ProjectsServiceAccountsList, self).__init__(name, fv)
-    flags.DEFINE_integer(
-        'pageSize',
-        None,
-        u'Optional limit on the number of service accounts to include in the '
-        u'response. Further accounts can subsequently be obtained by '
-        u'including the ListServiceAccountsResponse.next_page_token in a '
-        u'subsequent request.',
-        flag_values=fv)
-    flags.DEFINE_string(
-        'pageToken',
-        None,
-        u'Optional pagination token returned in an earlier '
-        u'ListServiceAccountsResponse.next_page_token.',
-        flag_values=fv)
-    flags.DEFINE_boolean(
-        'removeDeletedServiceAccounts',
-        None,
-        u'Do not list service accounts deleted from Gaia. <b><font '
-        u'color="red">DO NOT INCLUDE IN EXTERNAL DOCUMENTATION</font></b>.',
-        flag_values=fv)
-
-  def RunWithArgs(self, name):
-    """Lists ServiceAccounts for a project.
-
-    Args:
-      name: Required. The resource name of the project associated with the
-        service accounts, such as `projects/my-project-123`.
-
-    Flags:
-      pageSize: Optional limit on the number of service accounts to include in
-        the response. Further accounts can subsequently be obtained by
-        including the ListServiceAccountsResponse.next_page_token in a
-        subsequent request.
-      pageToken: Optional pagination token returned in an earlier
-        ListServiceAccountsResponse.next_page_token.
-      removeDeletedServiceAccounts: Do not list service accounts deleted from
-        Gaia. <b><font color="red">DO NOT INCLUDE IN EXTERNAL
-        DOCUMENTATION</font></b>.
-    """
-    client = GetClientFromFlags()
-    global_params = GetGlobalParamsFromFlags()
-    request = messages.IamProjectsServiceAccountsListRequest(
-        name=name.decode('utf8'),
-        )
-    if FLAGS['pageSize'].present:
-      request.pageSize = FLAGS.pageSize
-    if FLAGS['pageToken'].present:
-      request.pageToken = FLAGS.pageToken.decode('utf8')
-    if FLAGS['removeDeletedServiceAccounts'].present:
-      request.removeDeletedServiceAccounts = FLAGS.removeDeletedServiceAccounts
-    result = client.projects_serviceAccounts.List(
-        request, global_params=global_params)
-    print apitools_base_cli.FormatOutput(result)
-
-
-class ProjectsServiceAccountsSetIamPolicy(apitools_base_cli.NewCmd):
-  """Command wrapping projects_serviceAccounts.SetIamPolicy."""
-
-  usage = """projects_serviceAccounts_setIamPolicy <resource>"""
-
-  def __init__(self, name, fv):
-    super(ProjectsServiceAccountsSetIamPolicy, self).__init__(name, fv)
-    flags.DEFINE_string(
-        'setIamPolicyRequest',
-        None,
-        u'A SetIamPolicyRequest resource to be passed as the request body.',
-        flag_values=fv)
-
-  def RunWithArgs(self, resource):
-    """Sets the IAM access control policy for the specified IAM resource.
-
-    Args:
-      resource: REQUIRED: The resource for which the policy is being
-        specified. `resource` is usually specified as a path, such as
-        `projects/*project*/zones/*zone*/disks/*disk*`.  The format for the
-        path specified in this value is resource specific and is specified in
-        the `setIamPolicy` documentation.
-
-    Flags:
-      setIamPolicyRequest: A SetIamPolicyRequest resource to be passed as the
-        request body.
-    """
-    client = GetClientFromFlags()
-    global_params = GetGlobalParamsFromFlags()
-    request = messages.IamProjectsServiceAccountsSetIamPolicyRequest(
-        resource=resource.decode('utf8'),
-        )
-    if FLAGS['setIamPolicyRequest'].present:
-      request.setIamPolicyRequest = apitools_base.JsonToMessage(messages.SetIamPolicyRequest, FLAGS.setIamPolicyRequest)
-    result = client.projects_serviceAccounts.SetIamPolicy(
-        request, global_params=global_params)
-    print apitools_base_cli.FormatOutput(result)
-
-
-class ProjectsServiceAccountsSignBlob(apitools_base_cli.NewCmd):
-  """Command wrapping projects_serviceAccounts.SignBlob."""
-
-  usage = """projects_serviceAccounts_signBlob <name>"""
-
-  def __init__(self, name, fv):
-    super(ProjectsServiceAccountsSignBlob, self).__init__(name, fv)
-    flags.DEFINE_string(
-        'signBlobRequest',
-        None,
-        u'A SignBlobRequest resource to be passed as the request body.',
-        flag_values=fv)
-
-  def RunWithArgs(self, name):
-    """Signs a blob using a service account's system-managed private key.
-
-    Args:
-      name: The resource name of the service account in the following format:
-        `projects/{project}/serviceAccounts/{account}`. Using `-` as a
-        wildcard for the project will infer the project from the account. The
-        `account` value can be the `email` address or the `unique_id` of the
-        service account.
-
-    Flags:
-      signBlobRequest: A SignBlobRequest resource to be passed as the request
-        body.
-    """
-    client = GetClientFromFlags()
-    global_params = GetGlobalParamsFromFlags()
-    request = messages.IamProjectsServiceAccountsSignBlobRequest(
-        name=name.decode('utf8'),
-        )
-    if FLAGS['signBlobRequest'].present:
-      request.signBlobRequest = apitools_base.JsonToMessage(messages.SignBlobRequest, FLAGS.signBlobRequest)
-    result = client.projects_serviceAccounts.SignBlob(
-        request, global_params=global_params)
-    print apitools_base_cli.FormatOutput(result)
-
-
-class ProjectsServiceAccountsSignJwt(apitools_base_cli.NewCmd):
-  """Command wrapping projects_serviceAccounts.SignJwt."""
-
-  usage = """projects_serviceAccounts_signJwt <name>"""
-
-  def __init__(self, name, fv):
-    super(ProjectsServiceAccountsSignJwt, self).__init__(name, fv)
-    flags.DEFINE_string(
-        'signJwtRequest',
-        None,
-        u'A SignJwtRequest resource to be passed as the request body.',
-        flag_values=fv)
-
-  def RunWithArgs(self, name):
-    """Signs a JWT using a service account's system-managed private key.  If
-    no `exp` (expiry) time is contained in the claims, we will provide an
-    expiry of one hour in the future. If an expiry of more than one hour in
-    the future is requested, the request will fail.
-
-    Args:
-      name: The resource name of the service account in the following format:
-        `projects/{project}/serviceAccounts/{account}`. Using `-` as a
-        wildcard for the project will infer the project from the account. The
-        `account` value can be the `email` address or the `unique_id` of the
-        service account.
-
-    Flags:
-      signJwtRequest: A SignJwtRequest resource to be passed as the request
-        body.
-    """
-    client = GetClientFromFlags()
-    global_params = GetGlobalParamsFromFlags()
-    request = messages.IamProjectsServiceAccountsSignJwtRequest(
-        name=name.decode('utf8'),
-        )
-    if FLAGS['signJwtRequest'].present:
-      request.signJwtRequest = apitools_base.JsonToMessage(messages.SignJwtRequest, FLAGS.signJwtRequest)
-    result = client.projects_serviceAccounts.SignJwt(
-        request, global_params=global_params)
-    print apitools_base_cli.FormatOutput(result)
-
-
-class ProjectsServiceAccountsTestIamPermissions(apitools_base_cli.NewCmd):
-  """Command wrapping projects_serviceAccounts.TestIamPermissions."""
-
-  usage = """projects_serviceAccounts_testIamPermissions <resource>"""
-
-  def __init__(self, name, fv):
-    super(ProjectsServiceAccountsTestIamPermissions, self).__init__(name, fv)
-    flags.DEFINE_string(
-        'testIamPermissionsRequest',
-        None,
-        u'A TestIamPermissionsRequest resource to be passed as the request '
-        u'body.',
-        flag_values=fv)
-
-  def RunWithArgs(self, resource):
-    """Tests the specified permissions against the IAM access control policy
-    for the specified IAM resource.
-
-    Args:
-      resource: REQUIRED: The resource for which the policy detail is being
-        requested. `resource` is usually specified as a path, such as
-        `projects/*project*/zones/*zone*/disks/*disk*`.  The format for the
-        path specified in this value is resource specific and is specified in
-        the `testIamPermissions` documentation.
-
-    Flags:
-      testIamPermissionsRequest: A TestIamPermissionsRequest resource to be
-        passed as the request body.
-    """
-    client = GetClientFromFlags()
-    global_params = GetGlobalParamsFromFlags()
-    request = messages.IamProjectsServiceAccountsTestIamPermissionsRequest(
-        resource=resource.decode('utf8'),
-        )
-    if FLAGS['testIamPermissionsRequest'].present:
-      request.testIamPermissionsRequest = apitools_base.JsonToMessage(messages.TestIamPermissionsRequest, FLAGS.testIamPermissionsRequest)
-    result = client.projects_serviceAccounts.TestIamPermissions(
-        request, global_params=global_params)
-    print apitools_base_cli.FormatOutput(result)
-
-
-class ProjectsServiceAccountsUpdate(apitools_base_cli.NewCmd):
-  """Command wrapping projects_serviceAccounts.Update."""
-
-  usage = """projects_serviceAccounts_update <name>"""
-
-  def __init__(self, name, fv):
-    super(ProjectsServiceAccountsUpdate, self).__init__(name, fv)
-    flags.DEFINE_string(
-        'description',
-        None,
-        u'Optional. A user-specified opaque description of the service '
-        u'account.',
-        flag_values=fv)
-    flags.DEFINE_string(
-        'displayName',
-        None,
-        u'Optional. A user-specified description of the service account.  '
-        u'Must be fewer than 100 UTF-8 bytes.',
-        flag_values=fv)
-    flags.DEFINE_string(
-        'email',
-        None,
-        u'@OutputOnly The email address of the service account.',
-        flag_values=fv)
-    flags.DEFINE_string(
-        'etag',
-        None,
-        u'Used to perform a consistent read-modify-write.',
-        flag_values=fv)
-    flags.DEFINE_string(
-        'oauth2ClientId',
-        None,
-        u'@OutputOnly. The OAuth2 client id for the service account. This is '
-        u'used in conjunction with the OAuth2 clientconfig API to make three '
-        u'legged OAuth2 (3LO) flows to access the data of Google users.',
-        flag_values=fv)
-    flags.DEFINE_string(
-        'projectId',
-        None,
-        u'@OutputOnly The id of the project that owns the service account.',
-        flag_values=fv)
-    flags.DEFINE_string(
-        'uniqueId',
-        None,
-        u'@OutputOnly The unique and stable id of the service account.',
-        flag_values=fv)
-
-  def RunWithArgs(self, name):
-    """Updates a ServiceAccount.  Currently, only the following fields are
-    updatable: `display_name` . The `etag` is mandatory.
-
-    Args:
-      name: The resource name of the service account in the following format:
-        `projects/{project}/serviceAccounts/{account}`.  Requests using `-` as
-        a wildcard for the project will infer the project from the `account`
-        and the `account` value can be the `email` address or the `unique_id`
-        of the service account.  In responses the resource name will always be
-        in the format `projects/{project}/serviceAccounts/{email}`.
-
-    Flags:
-      description: Optional. A user-specified opaque description of the
-        service account.
-      displayName: Optional. A user-specified description of the service
-        account.  Must be fewer than 100 UTF-8 bytes.
-      email: @OutputOnly The email address of the service account.
-      etag: Used to perform a consistent read-modify-write.
-      oauth2ClientId: @OutputOnly. The OAuth2 client id for the service
-        account. This is used in conjunction with the OAuth2 clientconfig API
-        to make three legged OAuth2 (3LO) flows to access the data of Google
-        users.
-      projectId: @OutputOnly The id of the project that owns the service
-        account.
-      uniqueId: @OutputOnly The unique and stable id of the service account.
-    """
-    client = GetClientFromFlags()
-    global_params = GetGlobalParamsFromFlags()
-    request = messages.ServiceAccount(
-        name=name.decode('utf8'),
-        )
-    if FLAGS['description'].present:
-      request.description = FLAGS.description.decode('utf8')
-    if FLAGS['displayName'].present:
-      request.displayName = FLAGS.displayName.decode('utf8')
-    if FLAGS['email'].present:
-      request.email = FLAGS.email.decode('utf8')
-    if FLAGS['etag'].present:
-      request.etag = FLAGS.etag
-    if FLAGS['oauth2ClientId'].present:
-      request.oauth2ClientId = FLAGS.oauth2ClientId.decode('utf8')
-    if FLAGS['projectId'].present:
-      request.projectId = FLAGS.projectId.decode('utf8')
-    if FLAGS['uniqueId'].present:
-      request.uniqueId = FLAGS.uniqueId.decode('utf8')
-    result = client.projects_serviceAccounts.Update(
-        request, global_params=global_params)
-    print apitools_base_cli.FormatOutput(result)
-
-
-class ProjectsServiceAccountsKeysCreate(apitools_base_cli.NewCmd):
-  """Command wrapping projects_serviceAccounts_keys.Create."""
-
-  usage = """projects_serviceAccounts_keys_create <name>"""
-
-  def __init__(self, name, fv):
-    super(ProjectsServiceAccountsKeysCreate, self).__init__(name, fv)
-    flags.DEFINE_string(
-        'createServiceAccountKeyRequest',
-        None,
-        u'A CreateServiceAccountKeyRequest resource to be passed as the '
-        u'request body.',
-        flag_values=fv)
-
-  def RunWithArgs(self, name):
-    """Creates a ServiceAccountKey and returns it.
-
-    Args:
-      name: The resource name of the service account in the following format:
-        `projects/{project}/serviceAccounts/{account}`. Using `-` as a
-        wildcard for the project will infer the project from the account. The
-        `account` value can be the `email` address or the `unique_id` of the
-        service account.
-
-    Flags:
-      createServiceAccountKeyRequest: A CreateServiceAccountKeyRequest
-        resource to be passed as the request body.
-    """
-    client = GetClientFromFlags()
-    global_params = GetGlobalParamsFromFlags()
-    request = messages.IamProjectsServiceAccountsKeysCreateRequest(
-        name=name.decode('utf8'),
-        )
-    if FLAGS['createServiceAccountKeyRequest'].present:
-      request.createServiceAccountKeyRequest = apitools_base.JsonToMessage(messages.CreateServiceAccountKeyRequest, FLAGS.createServiceAccountKeyRequest)
-    result = client.projects_serviceAccounts_keys.Create(
-        request, global_params=global_params)
-    print apitools_base_cli.FormatOutput(result)
-
-
-class ProjectsServiceAccountsKeysDelete(apitools_base_cli.NewCmd):
-  """Command wrapping projects_serviceAccounts_keys.Delete."""
-
-  usage = """projects_serviceAccounts_keys_delete <name>"""
-
-  def __init__(self, name, fv):
-    super(ProjectsServiceAccountsKeysDelete, self).__init__(name, fv)
-
-  def RunWithArgs(self, name):
-    """Deletes a ServiceAccountKey.
-
-    Args:
-      name: The resource name of the service account key in the following
-        format: `projects/{project}/serviceAccounts/{account}/keys/{key}`.
-        Using `-` as a wildcard for the project will infer the project from
-        the account. The `account` value can be the `email` address or the
-        `unique_id` of the service account.
-    """
-    client = GetClientFromFlags()
-    global_params = GetGlobalParamsFromFlags()
-    request = messages.IamProjectsServiceAccountsKeysDeleteRequest(
-        name=name.decode('utf8'),
-        )
-    result = client.projects_serviceAccounts_keys.Delete(
-        request, global_params=global_params)
-    print apitools_base_cli.FormatOutput(result)
-
-
-class ProjectsServiceAccountsKeysGet(apitools_base_cli.NewCmd):
-  """Command wrapping projects_serviceAccounts_keys.Get."""
-
-  usage = """projects_serviceAccounts_keys_get <name>"""
-
-  def __init__(self, name, fv):
-    super(ProjectsServiceAccountsKeysGet, self).__init__(name, fv)
-    flags.DEFINE_enum(
-        'publicKeyType',
-        u'TYPE_NONE',
-        [u'TYPE_NONE', u'TYPE_X509_PEM_FILE', u'TYPE_RAW_PUBLIC_KEY'],
-        u'The output format of the public key requested. X509_PEM is the '
-        u'default output format.',
-        flag_values=fv)
-
-  def RunWithArgs(self, name):
-    """Gets the ServiceAccountKey by key id.
-
-    Args:
-      name: The resource name of the service account key in the following
-        format: `projects/{project}/serviceAccounts/{account}/keys/{key}`.
-        Using `-` as a wildcard for the project will infer the project from
-        the account. The `account` value can be the `email` address or the
-        `unique_id` of the service account.
-
-    Flags:
-      publicKeyType: The output format of the public key requested. X509_PEM
-        is the default output format.
-    """
-    client = GetClientFromFlags()
-    global_params = GetGlobalParamsFromFlags()
-    request = messages.IamProjectsServiceAccountsKeysGetRequest(
-        name=name.decode('utf8'),
-        )
-    if FLAGS['publicKeyType'].present:
-      request.publicKeyType = messages.IamProjectsServiceAccountsKeysGetRequest.PublicKeyTypeValueValuesEnum(FLAGS.publicKeyType)
-    result = client.projects_serviceAccounts_keys.Get(
-        request, global_params=global_params)
-    print apitools_base_cli.FormatOutput(result)
-
-
-class ProjectsServiceAccountsKeysList(apitools_base_cli.NewCmd):
-  """Command wrapping projects_serviceAccounts_keys.List."""
-
-  usage = """projects_serviceAccounts_keys_list <name>"""
-
-  def __init__(self, name, fv):
-    super(ProjectsServiceAccountsKeysList, self).__init__(name, fv)
-    flags.DEFINE_enum(
-        'keyTypes',
-        u'KEY_TYPE_UNSPECIFIED',
-        [u'KEY_TYPE_UNSPECIFIED', u'USER_MANAGED', u'SYSTEM_MANAGED'],
-        u'Filters the types of keys the user wants to include in the list '
-        u'response. Duplicate key types are not allowed. If no key type is '
-        u'provided, all keys are returned.',
-        flag_values=fv)
-
-  def RunWithArgs(self, name):
-    """Lists ServiceAccountKeys.
-
-    Args:
-      name: The resource name of the service account in the following format:
-        `projects/{project}/serviceAccounts/{account}`.  Using `-` as a
-        wildcard for the project, will infer the project from the account. The
-        `account` value can be the `email` address or the `unique_id` of the
-        service account.
-
-    Flags:
-      keyTypes: Filters the types of keys the user wants to include in the
-        list response. Duplicate key types are not allowed. If no key type is
-        provided, all keys are returned.
-    """
-    client = GetClientFromFlags()
-    global_params = GetGlobalParamsFromFlags()
-    request = messages.IamProjectsServiceAccountsKeysListRequest(
-        name=name.decode('utf8'),
-        )
-    if FLAGS['keyTypes'].present:
-      request.keyTypes = [messages.IamProjectsServiceAccountsKeysListRequest.KeyTypesValueValuesEnum(x) for x in FLAGS.keyTypes]
-    result = client.projects_serviceAccounts_keys.List(
-        request, global_params=global_params)
-    print apitools_base_cli.FormatOutput(result)
-
-
-class RolesQueryGrantableRoles(apitools_base_cli.NewCmd):
-  """Command wrapping roles.QueryGrantableRoles."""
-
-  usage = """roles_queryGrantableRoles"""
-
-  def __init__(self, name, fv):
-    super(RolesQueryGrantableRoles, self).__init__(name, fv)
-    flags.DEFINE_string(
-        'fullResourceName',
-        None,
-        u'Required. The full resource name to query from the list of '
-        u'grantable roles.  The name follows the Google Cloud Platform '
-        u'resource format. For example, a Cloud Platform project with id `my-'
-        u'project` will be named '
-        u'`//cloudresourcemanager.googleapis.com/projects/my-project`.',
-        flag_values=fv)
-
-  def RunWithArgs(self):
-    """Queries roles that can be granted on a particular resource.
-
-    Flags:
-      fullResourceName: Required. The full resource name to query from the
-        list of grantable roles.  The name follows the Google Cloud Platform
-        resource format. For example, a Cloud Platform project with id `my-
-        project` will be named `//cloudresourcemanager.googleapis.com/projects
-        /my-project`.
-    """
-    client = GetClientFromFlags()
-    global_params = GetGlobalParamsFromFlags()
-    request = messages.QueryGrantableRolesRequest(
-        )
-    if FLAGS['fullResourceName'].present:
-      request.fullResourceName = FLAGS.fullResourceName.decode('utf8')
-    result = client.roles.QueryGrantableRoles(
-        request, global_params=global_params)
-    print apitools_base_cli.FormatOutput(result)
-
-
-def main(_):
-  appcommands.AddCmd('pyshell', PyShell)
-  appcommands.AddCmd('iamPolicies_getPolicyDetails', IamPoliciesGetPolicyDetails)
-  appcommands.AddCmd('projects_serviceAccounts_create', ProjectsServiceAccountsCreate)
-  appcommands.AddCmd('projects_serviceAccounts_delete', ProjectsServiceAccountsDelete)
-  appcommands.AddCmd('projects_serviceAccounts_get', ProjectsServiceAccountsGet)
-  appcommands.AddCmd('projects_serviceAccounts_getIamPolicy', ProjectsServiceAccountsGetIamPolicy)
-  appcommands.AddCmd('projects_serviceAccounts_list', ProjectsServiceAccountsList)
-  appcommands.AddCmd('projects_serviceAccounts_setIamPolicy', ProjectsServiceAccountsSetIamPolicy)
-  appcommands.AddCmd('projects_serviceAccounts_signBlob', ProjectsServiceAccountsSignBlob)
-  appcommands.AddCmd('projects_serviceAccounts_signJwt', ProjectsServiceAccountsSignJwt)
-  appcommands.AddCmd('projects_serviceAccounts_testIamPermissions', ProjectsServiceAccountsTestIamPermissions)
-  appcommands.AddCmd('projects_serviceAccounts_update', ProjectsServiceAccountsUpdate)
-  appcommands.AddCmd('projects_serviceAccounts_keys_create', ProjectsServiceAccountsKeysCreate)
-  appcommands.AddCmd('projects_serviceAccounts_keys_delete', ProjectsServiceAccountsKeysDelete)
-  appcommands.AddCmd('projects_serviceAccounts_keys_get', ProjectsServiceAccountsKeysGet)
-  appcommands.AddCmd('projects_serviceAccounts_keys_list', ProjectsServiceAccountsKeysList)
-  appcommands.AddCmd('roles_queryGrantableRoles', RolesQueryGrantableRoles)
-
-  apitools_base_cli.SetupLogger()
-  if hasattr(appcommands, 'SetDefaultCommand'):
-    appcommands.SetDefaultCommand('pyshell')
-
-
-run_main = apitools_base_cli.run_main
-
-if __name__ == '__main__':
-  appcommands.Run()
diff --git a/samples/iam_sample/iam_v1/iam_v1_client.py b/samples/iam_sample/iam_v1/iam_v1_client.py
index 883c4d4..9f333ef 100644
--- a/samples/iam_sample/iam_v1/iam_v1_client.py
+++ b/samples/iam_sample/iam_v1/iam_v1_client.py
@@ -24,7 +24,7 @@
                get_credentials=True, http=None, model=None,
                log_request=False, log_response=False,
                credentials_args=None, default_global_params=None,
-               additional_http_headers=None):
+               additional_http_headers=None, response_encoding=None):
     """Create a new iam handle."""
     url = url or self.BASE_URL
     super(IamV1, self).__init__(
@@ -33,7 +33,8 @@
         log_request=log_request, log_response=log_response,
         credentials_args=credentials_args,
         default_global_params=default_global_params,
-        additional_http_headers=additional_http_headers)
+        additional_http_headers=additional_http_headers,
+        response_encoding=response_encoding)
     self.iamPolicies = self.IamPoliciesService(self)
     self.projects_serviceAccounts_keys = self.ProjectsServiceAccountsKeysService(self)
     self.projects_serviceAccounts = self.ProjectsServiceAccountsService(self)
@@ -51,7 +52,7 @@
           }
 
     def GetPolicyDetails(self, request, global_params=None):
-      """Returns the current IAM policy and the policies on the inherited resources.
+      r"""Returns the current IAM policy and the policies on the inherited resources.
 that the user has access to.
 
       Args:
@@ -88,7 +89,7 @@
           }
 
     def Create(self, request, global_params=None):
-      """Creates a ServiceAccountKey.
+      r"""Creates a ServiceAccountKey.
 and returns it.
 
       Args:
@@ -116,7 +117,7 @@
     )
 
     def Delete(self, request, global_params=None):
-      """Deletes a ServiceAccountKey.
+      r"""Deletes a ServiceAccountKey.
 
       Args:
         request: (IamProjectsServiceAccountsKeysDeleteRequest) input message
@@ -143,7 +144,7 @@
     )
 
     def Get(self, request, global_params=None):
-      """Gets the ServiceAccountKey.
+      r"""Gets the ServiceAccountKey.
 by key id.
 
       Args:
@@ -171,7 +172,7 @@
     )
 
     def List(self, request, global_params=None):
-      """Lists ServiceAccountKeys.
+      r"""Lists ServiceAccountKeys.
 
       Args:
         request: (IamProjectsServiceAccountsKeysListRequest) input message
@@ -208,7 +209,7 @@
           }
 
     def Create(self, request, global_params=None):
-      """Creates a ServiceAccount.
+      r"""Creates a ServiceAccount.
 and returns it.
 
       Args:
@@ -236,7 +237,7 @@
     )
 
     def Delete(self, request, global_params=None):
-      """Deletes a ServiceAccount.
+      r"""Deletes a ServiceAccount.
 
       Args:
         request: (IamProjectsServiceAccountsDeleteRequest) input message
@@ -263,7 +264,7 @@
     )
 
     def Get(self, request, global_params=None):
-      """Gets a ServiceAccount.
+      r"""Gets a ServiceAccount.
 
       Args:
         request: (IamProjectsServiceAccountsGetRequest) input message
@@ -290,7 +291,7 @@
     )
 
     def GetIamPolicy(self, request, global_params=None):
-      """Returns the IAM access control policy for specified IAM resource.
+      r"""Returns the IAM access control policy for specified IAM resource.
 
       Args:
         request: (IamProjectsServiceAccountsGetIamPolicyRequest) input message
@@ -317,7 +318,7 @@
     )
 
     def List(self, request, global_params=None):
-      """Lists ServiceAccounts for a project.
+      r"""Lists ServiceAccounts for a project.
 
       Args:
         request: (IamProjectsServiceAccountsListRequest) input message
@@ -344,7 +345,7 @@
     )
 
     def SetIamPolicy(self, request, global_params=None):
-      """Sets the IAM access control policy for the specified IAM resource.
+      r"""Sets the IAM access control policy for the specified IAM resource.
 
       Args:
         request: (IamProjectsServiceAccountsSetIamPolicyRequest) input message
@@ -371,7 +372,7 @@
     )
 
     def SignBlob(self, request, global_params=None):
-      """Signs a blob using a service account's system-managed private key.
+      r"""Signs a blob using a service account's system-managed private key.
 
       Args:
         request: (IamProjectsServiceAccountsSignBlobRequest) input message
@@ -398,7 +399,7 @@
     )
 
     def SignJwt(self, request, global_params=None):
-      """Signs a JWT using a service account's system-managed private key.
+      r"""Signs a JWT using a service account's system-managed private key.
 
 If no `exp` (expiry) time is contained in the claims, we will
 provide an expiry of one hour in the future. If an expiry
@@ -430,7 +431,7 @@
     )
 
     def TestIamPermissions(self, request, global_params=None):
-      """Tests the specified permissions against the IAM access control policy.
+      r"""Tests the specified permissions against the IAM access control policy.
 for the specified IAM resource.
 
       Args:
@@ -458,7 +459,7 @@
     )
 
     def Update(self, request, global_params=None):
-      """Updates a ServiceAccount.
+      r"""Updates a ServiceAccount.
 
 Currently, only the following fields are updatable:
 `display_name` .
@@ -509,7 +510,7 @@
           }
 
     def QueryGrantableRoles(self, request, global_params=None):
-      """Queries roles that can be granted on a particular resource.
+      r"""Queries roles that can be granted on a particular resource.
 
       Args:
         request: (QueryGrantableRolesRequest) input message
diff --git a/samples/iam_sample/iam_v1/iam_v1_messages.py b/samples/iam_sample/iam_v1/iam_v1_messages.py
index 1db85b0..aaf2bcf 100644
--- a/samples/iam_sample/iam_v1/iam_v1_messages.py
+++ b/samples/iam_sample/iam_v1/iam_v1_messages.py
@@ -14,8 +14,8 @@
 
 
 class AuditConfig(_messages.Message):
-  """Enables "data access" audit logging for a service and specifies a list of
-  members that are log-exempted.
+  r"""Enables "data access" audit logging for a service and specifies a list
+  of members that are log-exempted.
 
   Fields:
     exemptedMembers: Specifies the identities that are exempted from "data
@@ -31,7 +31,7 @@
 
 
 class Binding(_messages.Message):
-  """Associates `members` with a `role`.
+  r"""Associates `members` with a `role`.
 
   Fields:
     members: Specifies the identities requesting access for a Cloud Platform
@@ -56,11 +56,11 @@
 
 
 class CloudAuditOptions(_messages.Message):
-  """Write a Cloud Audit log"""
+  r"""Write a Cloud Audit log"""
 
 
 class Condition(_messages.Message):
-  """A condition to be met.
+  r"""A condition to be met.
 
   Enums:
     IamValueValuesEnum: Trusted attributes supplied by the IAM system.
@@ -80,7 +80,7 @@
   """
 
   class IamValueValuesEnum(_messages.Enum):
-    """Trusted attributes supplied by the IAM system.
+    r"""Trusted attributes supplied by the IAM system.
 
     Values:
       NO_ATTR: Default non-attribute.
@@ -92,7 +92,7 @@
     ATTRIBUTION = 2
 
   class OpValueValuesEnum(_messages.Enum):
-    """An operator to apply the subject with.
+    r"""An operator to apply the subject with.
 
     Values:
       NO_OP: Default no-op.
@@ -110,8 +110,8 @@
     DISCHARGED = 5
 
   class SysValueValuesEnum(_messages.Enum):
-    """Trusted attributes supplied by any service that owns resources and uses
-    the IAM system for access control.
+    r"""Trusted attributes supplied by any service that owns resources and
+    uses the IAM system for access control.
 
     Values:
       NO_ATTR: Default non-attribute type
@@ -135,7 +135,7 @@
 
 
 class CounterOptions(_messages.Message):
-  """Options for counters
+  r"""Options for counters
 
   Fields:
     field: The field value to attribute.
@@ -147,7 +147,7 @@
 
 
 class CreateServiceAccountKeyRequest(_messages.Message):
-  """The service account key create request.
+  r"""The service account key create request.
 
   Enums:
     PrivateKeyTypeValueValuesEnum: The output format of the private key.
@@ -159,7 +159,7 @@
   """
 
   class PrivateKeyTypeValueValuesEnum(_messages.Enum):
-    """The output format of the private key. `GOOGLE_CREDENTIALS_FILE` is the
+    r"""The output format of the private key. `GOOGLE_CREDENTIALS_FILE` is the
     default output format.
 
     Values:
@@ -178,7 +178,7 @@
 
 
 class CreateServiceAccountRequest(_messages.Message):
-  """The service account create request.
+  r"""The service account create request.
 
   Fields:
     accountId: Required. The account id that is used to generate the service
@@ -194,11 +194,11 @@
 
 
 class DataAccessOptions(_messages.Message):
-  """Write a Data Access (Gin) log"""
+  r"""Write a Data Access (Gin) log"""
 
 
 class Empty(_messages.Message):
-  """A generic empty message that you can re-use to avoid defining duplicated
+  r"""A generic empty message that you can re-use to avoid defining duplicated
   empty messages in your APIs. A typical example is to use it as the request
   or the response type of an API method. For instance:      service Foo {
   rpc Bar(google.protobuf.Empty) returns (google.protobuf.Empty);     }  The
@@ -208,7 +208,7 @@
 
 
 class GetPolicyDetailsRequest(_messages.Message):
-  """The request to get the current policy and the policies on the inherited
+  r"""The request to get the current policy and the policies on the inherited
   resources the user has access to.
 
   Fields:
@@ -229,7 +229,7 @@
 
 
 class GetPolicyDetailsResponse(_messages.Message):
-  """The response to the `GetPolicyDetailsRequest` containing the current
+  r"""The response to the `GetPolicyDetailsRequest` containing the current
   policy and the policies on the inherited resources the user has access to.
 
   Fields:
@@ -247,7 +247,7 @@
 
 
 class IamProjectsServiceAccountsCreateRequest(_messages.Message):
-  """A IamProjectsServiceAccountsCreateRequest object.
+  r"""A IamProjectsServiceAccountsCreateRequest object.
 
   Fields:
     createServiceAccountRequest: A CreateServiceAccountRequest resource to be
@@ -261,7 +261,7 @@
 
 
 class IamProjectsServiceAccountsDeleteRequest(_messages.Message):
-  """A IamProjectsServiceAccountsDeleteRequest object.
+  r"""A IamProjectsServiceAccountsDeleteRequest object.
 
   Fields:
     name: The resource name of the service account in the following format:
@@ -275,7 +275,7 @@
 
 
 class IamProjectsServiceAccountsGetIamPolicyRequest(_messages.Message):
-  """A IamProjectsServiceAccountsGetIamPolicyRequest object.
+  r"""A IamProjectsServiceAccountsGetIamPolicyRequest object.
 
   Fields:
     resource: REQUIRED: The resource for which the policy is being requested.
@@ -289,7 +289,7 @@
 
 
 class IamProjectsServiceAccountsGetRequest(_messages.Message):
-  """A IamProjectsServiceAccountsGetRequest object.
+  r"""A IamProjectsServiceAccountsGetRequest object.
 
   Fields:
     name: The resource name of the service account in the following format:
@@ -303,7 +303,7 @@
 
 
 class IamProjectsServiceAccountsKeysCreateRequest(_messages.Message):
-  """A IamProjectsServiceAccountsKeysCreateRequest object.
+  r"""A IamProjectsServiceAccountsKeysCreateRequest object.
 
   Fields:
     createServiceAccountKeyRequest: A CreateServiceAccountKeyRequest resource
@@ -320,7 +320,7 @@
 
 
 class IamProjectsServiceAccountsKeysDeleteRequest(_messages.Message):
-  """A IamProjectsServiceAccountsKeysDeleteRequest object.
+  r"""A IamProjectsServiceAccountsKeysDeleteRequest object.
 
   Fields:
     name: The resource name of the service account key in the following
@@ -334,7 +334,7 @@
 
 
 class IamProjectsServiceAccountsKeysGetRequest(_messages.Message):
-  """A IamProjectsServiceAccountsKeysGetRequest object.
+  r"""A IamProjectsServiceAccountsKeysGetRequest object.
 
   Enums:
     PublicKeyTypeValueValuesEnum: The output format of the public key
@@ -351,7 +351,7 @@
   """
 
   class PublicKeyTypeValueValuesEnum(_messages.Enum):
-    """The output format of the public key requested. X509_PEM is the default
+    r"""The output format of the public key requested. X509_PEM is the default
     output format.
 
     Values:
@@ -368,7 +368,7 @@
 
 
 class IamProjectsServiceAccountsKeysListRequest(_messages.Message):
-  """A IamProjectsServiceAccountsKeysListRequest object.
+  r"""A IamProjectsServiceAccountsKeysListRequest object.
 
   Enums:
     KeyTypesValueValuesEnum: Filters the types of keys the user wants to
@@ -387,7 +387,7 @@
   """
 
   class KeyTypesValueValuesEnum(_messages.Enum):
-    """Filters the types of keys the user wants to include in the list
+    r"""Filters the types of keys the user wants to include in the list
     response. Duplicate key types are not allowed. If no key type is provided,
     all keys are returned.
 
@@ -405,7 +405,7 @@
 
 
 class IamProjectsServiceAccountsListRequest(_messages.Message):
-  """A IamProjectsServiceAccountsListRequest object.
+  r"""A IamProjectsServiceAccountsListRequest object.
 
   Fields:
     name: Required. The resource name of the project associated with the
@@ -427,7 +427,7 @@
 
 
 class IamProjectsServiceAccountsSetIamPolicyRequest(_messages.Message):
-  """A IamProjectsServiceAccountsSetIamPolicyRequest object.
+  r"""A IamProjectsServiceAccountsSetIamPolicyRequest object.
 
   Fields:
     resource: REQUIRED: The resource for which the policy is being specified.
@@ -444,7 +444,7 @@
 
 
 class IamProjectsServiceAccountsSignBlobRequest(_messages.Message):
-  """A IamProjectsServiceAccountsSignBlobRequest object.
+  r"""A IamProjectsServiceAccountsSignBlobRequest object.
 
   Fields:
     name: The resource name of the service account in the following format:
@@ -461,7 +461,7 @@
 
 
 class IamProjectsServiceAccountsSignJwtRequest(_messages.Message):
-  """A IamProjectsServiceAccountsSignJwtRequest object.
+  r"""A IamProjectsServiceAccountsSignJwtRequest object.
 
   Fields:
     name: The resource name of the service account in the following format:
@@ -478,7 +478,7 @@
 
 
 class IamProjectsServiceAccountsTestIamPermissionsRequest(_messages.Message):
-  """A IamProjectsServiceAccountsTestIamPermissionsRequest object.
+  r"""A IamProjectsServiceAccountsTestIamPermissionsRequest object.
 
   Fields:
     resource: REQUIRED: The resource for which the policy detail is being
@@ -495,7 +495,7 @@
 
 
 class ListServiceAccountKeysResponse(_messages.Message):
-  """The service account keys list response.
+  r"""The service account keys list response.
 
   Fields:
     keys: The public keys for the service account.
@@ -505,7 +505,7 @@
 
 
 class ListServiceAccountsResponse(_messages.Message):
-  """The service account list response.
+  r"""The service account list response.
 
   Fields:
     accounts: The list of matching service accounts.
@@ -518,7 +518,7 @@
 
 
 class LogConfig(_messages.Message):
-  """Specifies what kind of log the caller must write Increment a streamz
+  r"""Specifies what kind of log the caller must write Increment a streamz
   counter with the specified metric and field names.  Metric names should
   start with a '/', generally be lowercase-only, and end in "_count". Field
   names should not contain an initial slash. The actual exported metric names
@@ -545,7 +545,7 @@
 
 
 class Policy(_messages.Message):
-  """Defines an Identity and Access Management (IAM) policy. It is used to
+  r"""Defines an Identity and Access Management (IAM) policy. It is used to
   specify access control policies for Cloud Platform resources.   A `Policy`
   consists of a list of `bindings`. A `Binding` binds a list of `members` to a
   `role`, where the members can be user accounts, Google groups, Google
@@ -596,7 +596,7 @@
 
 
 class PolicyDetail(_messages.Message):
-  """A policy and its full resource path.
+  r"""A policy and its full resource path.
 
   Fields:
     fullResourcePath: The full resource path of the policy e.g.,
@@ -610,7 +610,7 @@
 
 
 class QueryGrantableRolesRequest(_messages.Message):
-  """The grantable role query request.
+  r"""The grantable role query request.
 
   Fields:
     fullResourceName: Required. The full resource name to query from the list
@@ -623,7 +623,7 @@
 
 
 class QueryGrantableRolesResponse(_messages.Message):
-  """The grantable role query response.
+  r"""The grantable role query response.
 
   Fields:
     roles: The list of matching roles.
@@ -633,7 +633,7 @@
 
 
 class Role(_messages.Message):
-  """A role in the Identity and Access Management API.
+  r"""A role in the Identity and Access Management API.
 
   Fields:
     apiTokens: A string attribute.
@@ -651,7 +651,7 @@
 
 
 class Rule(_messages.Message):
-  """A rule to be applied in a Policy.
+  r"""A rule to be applied in a Policy.
 
   Enums:
     ActionValueValuesEnum: Required
@@ -675,7 +675,7 @@
   """
 
   class ActionValueValuesEnum(_messages.Enum):
-    """Required
+    r"""Required
 
     Values:
       NO_ACTION: Default no action.
@@ -704,8 +704,8 @@
 
 
 class ServiceAccount(_messages.Message):
-  """A service account in the Identity and Access Management API.  To create a
-  service account, specify the `project_id` and the `account_id` for the
+  r"""A service account in the Identity and Access Management API.  To create
+  a service account, specify the `project_id` and the `account_id` for the
   account.  The `account_id` is unique within the project, and is used to
   generate the service account email address and a stable `unique_id`.  All
   other methods can identify the service account using the format
@@ -745,8 +745,8 @@
 
 
 class ServiceAccountKey(_messages.Message):
-  """Represents a service account key.  A service account has two sets of key-
-  pairs: user-managed, and system-managed.  User-managed key-pairs can be
+  r"""Represents a service account key.  A service account has two sets of
+  key-pairs: user-managed, and system-managed.  User-managed key-pairs can be
   created and deleted by users.  Users are responsible for rotating these keys
   periodically to ensure security of their service accounts.  Users retain the
   private key of these key-pairs, and Google retains ONLY the public key.
@@ -778,7 +778,7 @@
   """
 
   class PrivateKeyTypeValueValuesEnum(_messages.Enum):
-    """The output format for the private key. Only provided in
+    r"""The output format for the private key. Only provided in
     `CreateServiceAccountKey` responses, not in `GetServiceAccountKey` or
     `ListServiceAccountKey` responses.  Google never exposes system-managed
     private keys, and never retains user-managed private keys.
@@ -804,7 +804,7 @@
 
 
 class SetIamPolicyRequest(_messages.Message):
-  """Request message for `SetIamPolicy` method.
+  r"""Request message for `SetIamPolicy` method.
 
   Fields:
     policy: REQUIRED: The complete policy to be applied to the `resource`. The
@@ -817,7 +817,7 @@
 
 
 class SignBlobRequest(_messages.Message):
-  """The service account sign blob request.
+  r"""The service account sign blob request.
 
   Fields:
     bytesToSign: The bytes to sign.
@@ -827,7 +827,7 @@
 
 
 class SignBlobResponse(_messages.Message):
-  """The service account sign blob response.
+  r"""The service account sign blob response.
 
   Fields:
     keyId: The id of the key used to sign the blob.
@@ -839,7 +839,7 @@
 
 
 class SignJwtRequest(_messages.Message):
-  """The service account sign JWT request.
+  r"""The service account sign JWT request.
 
   Fields:
     payload: The JWT payload to sign, a JSON JWT Claim set.
@@ -849,7 +849,7 @@
 
 
 class SignJwtResponse(_messages.Message):
-  """The service account sign JWT response.
+  r"""The service account sign JWT response.
 
   Fields:
     keyId: The id of the key used to sign the JWT.
@@ -861,7 +861,7 @@
 
 
 class StandardQueryParameters(_messages.Message):
-  """Query parameters accepted by all methods.
+  r"""Query parameters accepted by all methods.
 
   Enums:
     FXgafvValueValuesEnum: V1 error format.
@@ -890,7 +890,7 @@
   """
 
   class AltValueValuesEnum(_messages.Enum):
-    """Data format for response.
+    r"""Data format for response.
 
     Values:
       json: Responses with Content-Type of application/json
@@ -902,7 +902,7 @@
     proto = 2
 
   class FXgafvValueValuesEnum(_messages.Enum):
-    """V1 error format.
+    r"""V1 error format.
 
     Values:
       _1: v1 error format
@@ -928,7 +928,7 @@
 
 
 class TestIamPermissionsRequest(_messages.Message):
-  """Request message for `TestIamPermissions` method.
+  r"""Request message for `TestIamPermissions` method.
 
   Fields:
     permissions: The set of permissions to check for the `resource`.
@@ -940,7 +940,7 @@
 
 
 class TestIamPermissionsResponse(_messages.Message):
-  """Response message for `TestIamPermissions` method.
+  r"""Response message for `TestIamPermissions` method.
 
   Fields:
     permissions: A subset of `TestPermissionsRequest.permissions` that the
@@ -951,14 +951,10 @@
 
 
 encoding.AddCustomJsonFieldMapping(
-    Rule, 'in_', 'in',
-    package=u'iam')
+    Rule, 'in_', 'in')
 encoding.AddCustomJsonFieldMapping(
-    StandardQueryParameters, 'f__xgafv', '$.xgafv',
-    package=u'iam')
+    StandardQueryParameters, 'f__xgafv', '$.xgafv')
 encoding.AddCustomJsonEnumMapping(
-    StandardQueryParameters.FXgafvValueValuesEnum, '_1', '1',
-    package=u'iam')
+    StandardQueryParameters.FXgafvValueValuesEnum, '_1', '1')
 encoding.AddCustomJsonEnumMapping(
-    StandardQueryParameters.FXgafvValueValuesEnum, '_2', '2',
-    package=u'iam')
+    StandardQueryParameters.FXgafvValueValuesEnum, '_2', '2')
diff --git a/samples/regenerate_samples.py b/samples/regenerate_samples.py
index 9d41795..9a8ceba 100644
--- a/samples/regenerate_samples.py
+++ b/samples/regenerate_samples.py
@@ -12,12 +12,17 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-"""Script to regenerate samples with latest client generator."""
+"""Script to regenerate samples with latest client generator.
+
+To run:
+
+  python samples/regenerate_samples.py
+
+"""
 
 import os
 import subprocess
-
-_GEN_CLIENT_BINARY = 'gen_client'
+import sys
 
 _SAMPLES = [
     'bigquery_sample/bigquery_v2.json',
@@ -30,16 +35,27 @@
 
 
 def _Generate(samples):
+    # insert $PWD onto PYTHONPATH
+    insert_python_dir = os.getcwd()
+    python_path = os.environ.get('PYTHONPATH')
+    if python_path:
+      python_path = os.pathsep.join([insert_python_dir, python_path])
+    else:
+      python_path = insert_python_dir
+    os.environ['PYTHONPATH'] = python_path
+
     for sample in samples:
         sample_dir, sample_doc = os.path.split(sample)
+        sample_dir = 'samples/' + sample_dir
         name, ext = os.path.splitext(sample_doc)
         if ext != '.json':
             raise RuntimeError('Expected .json discovery doc [{0}]'
                                .format(sample))
         api_name, api_version = name.split('_')
         args = [
-            _GEN_CLIENT_BINARY,
-            '--infile', sample,
+            'python',
+            'apitools/gen/gen_client.py',
+            '--infile', 'samples/' + sample,
             '--init-file', 'empty',
             '--outdir={0}'.format(os.path.join(sample_dir, name)),
             '--overwrite',
@@ -47,6 +63,7 @@
             'samples.{0}_sample.{0}_{1}'.format(api_name, api_version),
             'client',
         ]
+        sys.stderr.write('Running: {}\n'.format(' '.join(args)))
         subprocess.check_call(args)
 
 
diff --git a/samples/servicemanagement_sample/servicemanagement_v1/servicemanagement_v1.py b/samples/servicemanagement_sample/servicemanagement_v1/servicemanagement_v1.py
deleted file mode 100644
index d1a4ab8..0000000
--- a/samples/servicemanagement_sample/servicemanagement_v1/servicemanagement_v1.py
+++ /dev/null
@@ -1,1520 +0,0 @@
-#!/usr/bin/env python
-"""CLI for servicemanagement, version v1."""
-# NOTE: This file is autogenerated and should not be edited by hand.
-
-import code
-import os
-import platform
-import sys
-
-from apitools.base.protorpclite import message_types
-from apitools.base.protorpclite import messages
-
-from google.apputils import appcommands
-import gflags as flags
-
-import apitools.base.py as apitools_base
-from apitools.base.py import cli as apitools_base_cli
-import servicemanagement_v1_client as client_lib
-import servicemanagement_v1_messages as messages
-
-
-def _DeclareServicemanagementFlags():
-  """Declare global flags in an idempotent way."""
-  if 'api_endpoint' in flags.FLAGS:
-    return
-  flags.DEFINE_string(
-      'api_endpoint',
-      u'https://servicemanagement.googleapis.com/',
-      'URL of the API endpoint to use.',
-      short_name='servicemanagement_url')
-  flags.DEFINE_string(
-      'history_file',
-      u'~/.servicemanagement.v1.history',
-      'File with interactive shell history.')
-  flags.DEFINE_multistring(
-      'add_header', [],
-      'Additional http headers (as key=value strings). '
-      'Can be specified multiple times.')
-  flags.DEFINE_string(
-      'service_account_json_keyfile', '',
-      'Filename for a JSON service account key downloaded'
-      ' from the Developer Console.')
-  flags.DEFINE_enum(
-      'f__xgafv',
-      u'_1',
-      [u'_1', u'_2'],
-      u'V1 error format.')
-  flags.DEFINE_string(
-      'access_token',
-      None,
-      u'OAuth access token.')
-  flags.DEFINE_enum(
-      'alt',
-      u'json',
-      [u'json', u'media', u'proto'],
-      u'Data format for response.')
-  flags.DEFINE_string(
-      'bearer_token',
-      None,
-      u'OAuth bearer token.')
-  flags.DEFINE_string(
-      'callback',
-      None,
-      u'JSONP')
-  flags.DEFINE_string(
-      'fields',
-      None,
-      u'Selector specifying which fields to include in a partial response.')
-  flags.DEFINE_string(
-      'key',
-      None,
-      u'API key. Your API key identifies your project and provides you with '
-      u'API access, quota, and reports. Required unless you provide an OAuth '
-      u'2.0 token.')
-  flags.DEFINE_string(
-      'oauth_token',
-      None,
-      u'OAuth 2.0 token for the current user.')
-  flags.DEFINE_boolean(
-      'pp',
-      'True',
-      u'Pretty-print response.')
-  flags.DEFINE_boolean(
-      'prettyPrint',
-      'True',
-      u'Returns response with indentations and line breaks.')
-  flags.DEFINE_string(
-      'quotaUser',
-      None,
-      u'Available to use for quota purposes for server-side applications. Can'
-      u' be any arbitrary string assigned to a user, but should not exceed 40'
-      u' characters.')
-  flags.DEFINE_string(
-      'trace',
-      None,
-      'A tracing token of the form "token:<tokenid>" to include in api '
-      'requests.')
-  flags.DEFINE_string(
-      'uploadType',
-      None,
-      u'Legacy upload protocol for media (e.g. "media", "multipart").')
-  flags.DEFINE_string(
-      'upload_protocol',
-      None,
-      u'Upload protocol for media (e.g. "raw", "multipart").')
-
-
-FLAGS = flags.FLAGS
-apitools_base_cli.DeclareBaseFlags()
-_DeclareServicemanagementFlags()
-
-
-def GetGlobalParamsFromFlags():
-  """Return a StandardQueryParameters based on flags."""
-  result = messages.StandardQueryParameters()
-  if FLAGS['f__xgafv'].present:
-    result.f__xgafv = messages.StandardQueryParameters.FXgafvValueValuesEnum(FLAGS.f__xgafv)
-  if FLAGS['access_token'].present:
-    result.access_token = FLAGS.access_token.decode('utf8')
-  if FLAGS['alt'].present:
-    result.alt = messages.StandardQueryParameters.AltValueValuesEnum(FLAGS.alt)
-  if FLAGS['bearer_token'].present:
-    result.bearer_token = FLAGS.bearer_token.decode('utf8')
-  if FLAGS['callback'].present:
-    result.callback = FLAGS.callback.decode('utf8')
-  if FLAGS['fields'].present:
-    result.fields = FLAGS.fields.decode('utf8')
-  if FLAGS['key'].present:
-    result.key = FLAGS.key.decode('utf8')
-  if FLAGS['oauth_token'].present:
-    result.oauth_token = FLAGS.oauth_token.decode('utf8')
-  if FLAGS['pp'].present:
-    result.pp = FLAGS.pp
-  if FLAGS['prettyPrint'].present:
-    result.prettyPrint = FLAGS.prettyPrint
-  if FLAGS['quotaUser'].present:
-    result.quotaUser = FLAGS.quotaUser.decode('utf8')
-  if FLAGS['trace'].present:
-    result.trace = FLAGS.trace.decode('utf8')
-  if FLAGS['uploadType'].present:
-    result.uploadType = FLAGS.uploadType.decode('utf8')
-  if FLAGS['upload_protocol'].present:
-    result.upload_protocol = FLAGS.upload_protocol.decode('utf8')
-  return result
-
-
-def GetClientFromFlags():
-  """Return a client object, configured from flags."""
-  log_request = FLAGS.log_request or FLAGS.log_request_response
-  log_response = FLAGS.log_response or FLAGS.log_request_response
-  api_endpoint = apitools_base.NormalizeApiEndpoint(FLAGS.api_endpoint)
-  additional_http_headers = dict(x.split('=', 1) for x in FLAGS.add_header)
-  credentials_args = {
-      'service_account_json_keyfile': os.path.expanduser(FLAGS.service_account_json_keyfile)
-  }
-  try:
-    client = client_lib.ServicemanagementV1(
-        api_endpoint, log_request=log_request,
-        log_response=log_response,
-        credentials_args=credentials_args,
-        additional_http_headers=additional_http_headers)
-  except apitools_base.CredentialsError as e:
-    print 'Error creating credentials: %s' % e
-    sys.exit(1)
-  return client
-
-
-class PyShell(appcommands.Cmd):
-
-  def Run(self, _):
-    """Run an interactive python shell with the client."""
-    client = GetClientFromFlags()
-    params = GetGlobalParamsFromFlags()
-    for field in params.all_fields():
-      value = params.get_assigned_value(field.name)
-      if value != field.default:
-        client.AddGlobalParam(field.name, value)
-    banner = """
-           == servicemanagement interactive console ==
-                 client: a servicemanagement client
-          apitools_base: base apitools module
-         messages: the generated messages module
-    """
-    local_vars = {
-        'apitools_base': apitools_base,
-        'client': client,
-        'client_lib': client_lib,
-        'messages': messages,
-    }
-    if platform.system() == 'Linux':
-      console = apitools_base_cli.ConsoleWithReadline(
-          local_vars, histfile=FLAGS.history_file)
-    else:
-      console = code.InteractiveConsole(local_vars)
-    try:
-      console.interact(banner)
-    except SystemExit as e:
-      return e.code
-
-
-class OperationsGet(apitools_base_cli.NewCmd):
-  """Command wrapping operations.Get."""
-
-  usage = """operations_get <operationsId>"""
-
-  def __init__(self, name, fv):
-    super(OperationsGet, self).__init__(name, fv)
-
-  def RunWithArgs(self, operationsId):
-    """Gets the latest state of a long-running operation.  Clients can use
-    this method to poll the operation result at intervals as recommended by
-    the API service.
-
-    Args:
-      operationsId: Part of `name`. The name of the operation resource.
-    """
-    client = GetClientFromFlags()
-    global_params = GetGlobalParamsFromFlags()
-    request = messages.ServicemanagementOperationsGetRequest(
-        operationsId=operationsId.decode('utf8'),
-        )
-    result = client.operations.Get(
-        request, global_params=global_params)
-    print apitools_base_cli.FormatOutput(result)
-
-
-class ServicesConvertConfig(apitools_base_cli.NewCmd):
-  """Command wrapping services.ConvertConfig."""
-
-  usage = """services_convertConfig"""
-
-  def __init__(self, name, fv):
-    super(ServicesConvertConfig, self).__init__(name, fv)
-    flags.DEFINE_string(
-        'configSpec',
-        None,
-        u'Input configuration For this version of API, the supported type is '
-        u'OpenApiSpec',
-        flag_values=fv)
-    flags.DEFINE_string(
-        'openApiSpec',
-        None,
-        u'The OpenAPI specification for an API.',
-        flag_values=fv)
-    flags.DEFINE_string(
-        'serviceName',
-        None,
-        u'The service name to use for constructing the normalized service '
-        u'configuration equivalent of the provided configuration '
-        u'specification.',
-        flag_values=fv)
-    flags.DEFINE_string(
-        'swaggerSpec',
-        None,
-        u'The swagger specification for an API.',
-        flag_values=fv)
-
-  def RunWithArgs(self):
-    """DEPRECATED. `SubmitConfigSource` with `validate_only=true` will provide
-    config conversion moving forward.  Converts an API specification (e.g.
-    Swagger spec) to an equivalent `google.api.Service`.
-
-    Flags:
-      configSpec: Input configuration For this version of API, the supported
-        type is OpenApiSpec
-      openApiSpec: The OpenAPI specification for an API.
-      serviceName: The service name to use for constructing the normalized
-        service configuration equivalent of the provided configuration
-        specification.
-      swaggerSpec: The swagger specification for an API.
-    """
-    client = GetClientFromFlags()
-    global_params = GetGlobalParamsFromFlags()
-    request = messages.ConvertConfigRequest(
-        )
-    if FLAGS['configSpec'].present:
-      request.configSpec = apitools_base.JsonToMessage(messages.ConvertConfigRequest.ConfigSpecValue, FLAGS.configSpec)
-    if FLAGS['openApiSpec'].present:
-      request.openApiSpec = apitools_base.JsonToMessage(messages.OpenApiSpec, FLAGS.openApiSpec)
-    if FLAGS['serviceName'].present:
-      request.serviceName = FLAGS.serviceName.decode('utf8')
-    if FLAGS['swaggerSpec'].present:
-      request.swaggerSpec = apitools_base.JsonToMessage(messages.SwaggerSpec, FLAGS.swaggerSpec)
-    result = client.services.ConvertConfig(
-        request, global_params=global_params)
-    print apitools_base_cli.FormatOutput(result)
-
-
-class ServicesCreate(apitools_base_cli.NewCmd):
-  """Command wrapping services.Create."""
-
-  usage = """services_create"""
-
-  def __init__(self, name, fv):
-    super(ServicesCreate, self).__init__(name, fv)
-    flags.DEFINE_string(
-        'configSource',
-        None,
-        u'User-supplied source configuration for the service. This is '
-        u'distinct from the generated configuration provided in '
-        u'`google.api.Service`. This is NOT populated on GetService calls at '
-        u'the moment. NOTE: Any upsert operation that contains both a '
-        u'service_config and a config_source is considered invalid and will '
-        u'result in an error being returned.',
-        flag_values=fv)
-    flags.DEFINE_string(
-        'generation',
-        None,
-        u'A server-assigned monotonically increasing number that changes '
-        u'whenever a mutation is made to the `ManagedService` or any of its '
-        u'components via the `ServiceManager` API.',
-        flag_values=fv)
-    flags.DEFINE_string(
-        'operations',
-        None,
-        u'Read-only view of pending operations affecting this resource, if '
-        u'requested.',
-        flag_values=fv)
-    flags.DEFINE_string(
-        'producerProjectId',
-        None,
-        u'ID of the project that produces and owns this service.',
-        flag_values=fv)
-    flags.DEFINE_string(
-        'projectSettings',
-        None,
-        u'Read-only view of settings for a particular consumer project, if '
-        u'requested.',
-        flag_values=fv)
-    flags.DEFINE_string(
-        'serviceConfig',
-        None,
-        u"The service's generated configuration.",
-        flag_values=fv)
-    flags.DEFINE_string(
-        'serviceName',
-        None,
-        u'The name of the service.  See the `ServiceManager` overview for '
-        u'naming requirements.  This name must match '
-        u'`google.api.Service.name` in the `service_config` field.',
-        flag_values=fv)
-
-  def RunWithArgs(self):
-    """Creates a new managed service.  Operation<response: ManagedService>
-
-    Flags:
-      configSource: User-supplied source configuration for the service. This
-        is distinct from the generated configuration provided in
-        `google.api.Service`. This is NOT populated on GetService calls at the
-        moment. NOTE: Any upsert operation that contains both a service_config
-        and a config_source is considered invalid and will result in an error
-        being returned.
-      generation: A server-assigned monotonically increasing number that
-        changes whenever a mutation is made to the `ManagedService` or any of
-        its components via the `ServiceManager` API.
-      operations: Read-only view of pending operations affecting this
-        resource, if requested.
-      producerProjectId: ID of the project that produces and owns this
-        service.
-      projectSettings: Read-only view of settings for a particular consumer
-        project, if requested.
-      serviceConfig: The service's generated configuration.
-      serviceName: The name of the service.  See the `ServiceManager` overview
-        for naming requirements.  This name must match
-        `google.api.Service.name` in the `service_config` field.
-    """
-    client = GetClientFromFlags()
-    global_params = GetGlobalParamsFromFlags()
-    request = messages.ManagedService(
-        )
-    if FLAGS['configSource'].present:
-      request.configSource = apitools_base.JsonToMessage(messages.ConfigSource, FLAGS.configSource)
-    if FLAGS['generation'].present:
-      request.generation = int(FLAGS.generation)
-    if FLAGS['operations'].present:
-      request.operations = [apitools_base.JsonToMessage(messages.Operation, x) for x in FLAGS.operations]
-    if FLAGS['producerProjectId'].present:
-      request.producerProjectId = FLAGS.producerProjectId.decode('utf8')
-    if FLAGS['projectSettings'].present:
-      request.projectSettings = apitools_base.JsonToMessage(messages.ProjectSettings, FLAGS.projectSettings)
-    if FLAGS['serviceConfig'].present:
-      request.serviceConfig = apitools_base.JsonToMessage(messages.Service, FLAGS.serviceConfig)
-    if FLAGS['serviceName'].present:
-      request.serviceName = FLAGS.serviceName.decode('utf8')
-    result = client.services.Create(
-        request, global_params=global_params)
-    print apitools_base_cli.FormatOutput(result)
-
-
-class ServicesDelete(apitools_base_cli.NewCmd):
-  """Command wrapping services.Delete."""
-
-  usage = """services_delete <serviceName>"""
-
-  def __init__(self, name, fv):
-    super(ServicesDelete, self).__init__(name, fv)
-
-  def RunWithArgs(self, serviceName):
-    """Deletes a managed service.  Operation<response: google.protobuf.Empty>
-
-    Args:
-      serviceName: The name of the service.  See the `ServiceManager` overview
-        for naming requirements.  For example: `example.googleapis.com`.
-    """
-    client = GetClientFromFlags()
-    global_params = GetGlobalParamsFromFlags()
-    request = messages.ServicemanagementServicesDeleteRequest(
-        serviceName=serviceName.decode('utf8'),
-        )
-    result = client.services.Delete(
-        request, global_params=global_params)
-    print apitools_base_cli.FormatOutput(result)
-
-
-class ServicesDisable(apitools_base_cli.NewCmd):
-  """Command wrapping services.Disable."""
-
-  usage = """services_disable <serviceName>"""
-
-  def __init__(self, name, fv):
-    super(ServicesDisable, self).__init__(name, fv)
-    flags.DEFINE_string(
-        'disableServiceRequest',
-        None,
-        u'A DisableServiceRequest resource to be passed as the request body.',
-        flag_values=fv)
-
-  def RunWithArgs(self, serviceName):
-    """Disable a managed service for a project. Google Service Management will
-    only disable the managed service even if there are other services depend
-    on the managed service.  Operation<response: DisableServiceResponse>
-
-    Args:
-      serviceName: Name of the service to disable. Specifying an unknown
-        service name will cause the request to fail.
-
-    Flags:
-      disableServiceRequest: A DisableServiceRequest resource to be passed as
-        the request body.
-    """
-    client = GetClientFromFlags()
-    global_params = GetGlobalParamsFromFlags()
-    request = messages.ServicemanagementServicesDisableRequest(
-        serviceName=serviceName.decode('utf8'),
-        )
-    if FLAGS['disableServiceRequest'].present:
-      request.disableServiceRequest = apitools_base.JsonToMessage(messages.DisableServiceRequest, FLAGS.disableServiceRequest)
-    result = client.services.Disable(
-        request, global_params=global_params)
-    print apitools_base_cli.FormatOutput(result)
-
-
-class ServicesEnable(apitools_base_cli.NewCmd):
-  """Command wrapping services.Enable."""
-
-  usage = """services_enable <serviceName>"""
-
-  def __init__(self, name, fv):
-    super(ServicesEnable, self).__init__(name, fv)
-    flags.DEFINE_string(
-        'enableServiceRequest',
-        None,
-        u'A EnableServiceRequest resource to be passed as the request body.',
-        flag_values=fv)
-
-  def RunWithArgs(self, serviceName):
-    """Enable a managed service for a project with default setting. If the
-    managed service has dependencies, they will be enabled as well.
-    Operation<response: EnableServiceResponse>
-
-    Args:
-      serviceName: Name of the service to enable. Specifying an unknown
-        service name will cause the request to fail.
-
-    Flags:
-      enableServiceRequest: A EnableServiceRequest resource to be passed as
-        the request body.
-    """
-    client = GetClientFromFlags()
-    global_params = GetGlobalParamsFromFlags()
-    request = messages.ServicemanagementServicesEnableRequest(
-        serviceName=serviceName.decode('utf8'),
-        )
-    if FLAGS['enableServiceRequest'].present:
-      request.enableServiceRequest = apitools_base.JsonToMessage(messages.EnableServiceRequest, FLAGS.enableServiceRequest)
-    result = client.services.Enable(
-        request, global_params=global_params)
-    print apitools_base_cli.FormatOutput(result)
-
-
-class ServicesGet(apitools_base_cli.NewCmd):
-  """Command wrapping services.Get."""
-
-  usage = """services_get <serviceName>"""
-
-  def __init__(self, name, fv):
-    super(ServicesGet, self).__init__(name, fv)
-    flags.DEFINE_string(
-        'consumerProjectId',
-        None,
-        u'If project_settings is expanded, return settings for the specified '
-        u'consumer project.',
-        flag_values=fv)
-    flags.DEFINE_string(
-        'expand',
-        None,
-        u'Fields to expand in any results.  By default, the following fields '
-        u'are not present in the result: - `operations` - `project_settings` '
-        u'- `project_settings.operations` - `quota_usage` (It requires '
-        u'`project_settings`) - `historical_quota_usage` (It requires '
-        u'`project_settings`)',
-        flag_values=fv)
-    flags.DEFINE_enum(
-        'view',
-        u'PROJECT_SETTINGS_VIEW_UNSPECIFIED',
-        [u'PROJECT_SETTINGS_VIEW_UNSPECIFIED', u'CONSUMER_VIEW', u'PRODUCER_VIEW', u'ALL'],
-        u'If project_settings is expanded, request only fields for the '
-        u'specified view.',
-        flag_values=fv)
-
-  def RunWithArgs(self, serviceName):
-    """Gets a managed service. If the `consumer_project_id` is specified, the
-    project's settings for the specified service are also returned.
-
-    Args:
-      serviceName: The name of the service.  See the `ServiceManager` overview
-        for naming requirements.  For example: `example.googleapis.com`.
-
-    Flags:
-      consumerProjectId: If project_settings is expanded, return settings for
-        the specified consumer project.
-      expand: Fields to expand in any results.  By default, the following
-        fields are not present in the result: - `operations` -
-        `project_settings` - `project_settings.operations` - `quota_usage` (It
-        requires `project_settings`) - `historical_quota_usage` (It requires
-        `project_settings`)
-      view: If project_settings is expanded, request only fields for the
-        specified view.
-    """
-    client = GetClientFromFlags()
-    global_params = GetGlobalParamsFromFlags()
-    request = messages.ServicemanagementServicesGetRequest(
-        serviceName=serviceName.decode('utf8'),
-        )
-    if FLAGS['consumerProjectId'].present:
-      request.consumerProjectId = FLAGS.consumerProjectId.decode('utf8')
-    if FLAGS['expand'].present:
-      request.expand = FLAGS.expand.decode('utf8')
-    if FLAGS['view'].present:
-      request.view = messages.ServicemanagementServicesGetRequest.ViewValueValuesEnum(FLAGS.view)
-    result = client.services.Get(
-        request, global_params=global_params)
-    print apitools_base_cli.FormatOutput(result)
-
-
-class ServicesGetAccessPolicy(apitools_base_cli.NewCmd):
-  """Command wrapping services.GetAccessPolicy."""
-
-  usage = """services_getAccessPolicy <serviceName>"""
-
-  def __init__(self, name, fv):
-    super(ServicesGetAccessPolicy, self).__init__(name, fv)
-
-  def RunWithArgs(self, serviceName):
-    """Producer method to retrieve current policy.
-
-    Args:
-      serviceName: The name of the service.  For example:
-        `example.googleapis.com`.
-    """
-    client = GetClientFromFlags()
-    global_params = GetGlobalParamsFromFlags()
-    request = messages.ServicemanagementServicesGetAccessPolicyRequest(
-        serviceName=serviceName.decode('utf8'),
-        )
-    result = client.services.GetAccessPolicy(
-        request, global_params=global_params)
-    print apitools_base_cli.FormatOutput(result)
-
-
-class ServicesGetConfig(apitools_base_cli.NewCmd):
-  """Command wrapping services.GetConfig."""
-
-  usage = """services_getConfig <serviceName>"""
-
-  def __init__(self, name, fv):
-    super(ServicesGetConfig, self).__init__(name, fv)
-    flags.DEFINE_string(
-        'configId',
-        None,
-        u'The id of the service config resource. Optional. If it is not '
-        u'specified, the latest version of config will be returned.',
-        flag_values=fv)
-
-  def RunWithArgs(self, serviceName):
-    """Gets a service config (version) for a managed service. If `config_id`
-    is not specified, the latest service config will be returned.
-
-    Args:
-      serviceName: The name of the service.  See the `ServiceManager` overview
-        for naming requirements.  For example: `example.googleapis.com`.
-
-    Flags:
-      configId: The id of the service config resource. Optional. If it is not
-        specified, the latest version of config will be returned.
-    """
-    client = GetClientFromFlags()
-    global_params = GetGlobalParamsFromFlags()
-    request = messages.ServicemanagementServicesGetConfigRequest(
-        serviceName=serviceName.decode('utf8'),
-        )
-    if FLAGS['configId'].present:
-      request.configId = FLAGS.configId.decode('utf8')
-    result = client.services.GetConfig(
-        request, global_params=global_params)
-    print apitools_base_cli.FormatOutput(result)
-
-
-class ServicesList(apitools_base_cli.NewCmd):
-  """Command wrapping services.List."""
-
-  usage = """services_list"""
-
-  def __init__(self, name, fv):
-    super(ServicesList, self).__init__(name, fv)
-    flags.DEFINE_string(
-        'category',
-        None,
-        u'Include services only in the specified category. Supported '
-        u'categories are servicemanagement.googleapis.com/categories/google-'
-        u'services or servicemanagement.googleapis.com/categories/play-games.',
-        flag_values=fv)
-    flags.DEFINE_string(
-        'consumerProjectId',
-        None,
-        u'Include services consumed by the specified project.  If '
-        u'project_settings is expanded, then this field controls which '
-        u'project project_settings is populated for.',
-        flag_values=fv)
-    flags.DEFINE_string(
-        'expand',
-        None,
-        u'Fields to expand in any results.  By default, the following fields '
-        u'are not fully included in list results: - `operations` - '
-        u'`project_settings` - `project_settings.operations` - `quota_usage` '
-        u'(It requires `project_settings`)',
-        flag_values=fv)
-    flags.DEFINE_integer(
-        'pageSize',
-        None,
-        u'Requested size of the next page of data.',
-        flag_values=fv)
-    flags.DEFINE_string(
-        'pageToken',
-        None,
-        u'Token identifying which result to start with; returned by a '
-        u'previous list call.',
-        flag_values=fv)
-    flags.DEFINE_string(
-        'producerProjectId',
-        None,
-        u'Include services produced by the specified project.',
-        flag_values=fv)
-
-  def RunWithArgs(self):
-    """Lists all managed services. If the `consumer_project_id` is specified,
-    the project's settings for the specified service are also returned.
-
-    Flags:
-      category: Include services only in the specified category. Supported
-        categories are servicemanagement.googleapis.com/categories/google-
-        services or servicemanagement.googleapis.com/categories/play-games.
-      consumerProjectId: Include services consumed by the specified project.
-        If project_settings is expanded, then this field controls which
-        project project_settings is populated for.
-      expand: Fields to expand in any results.  By default, the following
-        fields are not fully included in list results: - `operations` -
-        `project_settings` - `project_settings.operations` - `quota_usage` (It
-        requires `project_settings`)
-      pageSize: Requested size of the next page of data.
-      pageToken: Token identifying which result to start with; returned by a
-        previous list call.
-      producerProjectId: Include services produced by the specified project.
-    """
-    client = GetClientFromFlags()
-    global_params = GetGlobalParamsFromFlags()
-    request = messages.ServicemanagementServicesListRequest(
-        )
-    if FLAGS['category'].present:
-      request.category = FLAGS.category.decode('utf8')
-    if FLAGS['consumerProjectId'].present:
-      request.consumerProjectId = FLAGS.consumerProjectId.decode('utf8')
-    if FLAGS['expand'].present:
-      request.expand = FLAGS.expand.decode('utf8')
-    if FLAGS['pageSize'].present:
-      request.pageSize = FLAGS.pageSize
-    if FLAGS['pageToken'].present:
-      request.pageToken = FLAGS.pageToken.decode('utf8')
-    if FLAGS['producerProjectId'].present:
-      request.producerProjectId = FLAGS.producerProjectId.decode('utf8')
-    result = client.services.List(
-        request, global_params=global_params)
-    print apitools_base_cli.FormatOutput(result)
-
-
-class ServicesPatch(apitools_base_cli.NewCmd):
-  """Command wrapping services.Patch."""
-
-  usage = """services_patch <serviceName>"""
-
-  def __init__(self, name, fv):
-    super(ServicesPatch, self).__init__(name, fv)
-    flags.DEFINE_string(
-        'managedService',
-        None,
-        u'A ManagedService resource to be passed as the request body.',
-        flag_values=fv)
-    flags.DEFINE_string(
-        'updateMask',
-        None,
-        u'A mask specifying which fields to update.',
-        flag_values=fv)
-
-  def RunWithArgs(self, serviceName):
-    """Updates the specified subset of the configuration. If the specified
-    service does not exists the patch operation fails.  Operation<response:
-    ManagedService>
-
-    Args:
-      serviceName: The name of the service.  See the `ServiceManager` overview
-        for naming requirements.  For example: `example.googleapis.com`.
-
-    Flags:
-      managedService: A ManagedService resource to be passed as the request
-        body.
-      updateMask: A mask specifying which fields to update.
-    """
-    client = GetClientFromFlags()
-    global_params = GetGlobalParamsFromFlags()
-    request = messages.ServicemanagementServicesPatchRequest(
-        serviceName=serviceName.decode('utf8'),
-        )
-    if FLAGS['managedService'].present:
-      request.managedService = apitools_base.JsonToMessage(messages.ManagedService, FLAGS.managedService)
-    if FLAGS['updateMask'].present:
-      request.updateMask = FLAGS.updateMask.decode('utf8')
-    result = client.services.Patch(
-        request, global_params=global_params)
-    print apitools_base_cli.FormatOutput(result)
-
-
-class ServicesPatchConfig(apitools_base_cli.NewCmd):
-  """Command wrapping services.PatchConfig."""
-
-  usage = """services_patchConfig <serviceName>"""
-
-  def __init__(self, name, fv):
-    super(ServicesPatchConfig, self).__init__(name, fv)
-    flags.DEFINE_string(
-        'service',
-        None,
-        u'A Service resource to be passed as the request body.',
-        flag_values=fv)
-    flags.DEFINE_string(
-        'updateMask',
-        None,
-        u'A mask specifying which fields to update.',
-        flag_values=fv)
-
-  def RunWithArgs(self, serviceName):
-    """Updates the specified subset of the service resource. Equivalent to
-    calling `PatchService` with only the `service_config` field updated.
-    Operation<response: google.api.Service>
-
-    Args:
-      serviceName: The name of the service.  See the `ServiceManager` overview
-        for naming requirements.  For example: `example.googleapis.com`.
-
-    Flags:
-      service: A Service resource to be passed as the request body.
-      updateMask: A mask specifying which fields to update.
-    """
-    client = GetClientFromFlags()
-    global_params = GetGlobalParamsFromFlags()
-    request = messages.ServicemanagementServicesPatchConfigRequest(
-        serviceName=serviceName.decode('utf8'),
-        )
-    if FLAGS['service'].present:
-      request.service = apitools_base.JsonToMessage(messages.Service, FLAGS.service)
-    if FLAGS['updateMask'].present:
-      request.updateMask = FLAGS.updateMask.decode('utf8')
-    result = client.services.PatchConfig(
-        request, global_params=global_params)
-    print apitools_base_cli.FormatOutput(result)
-
-
-class ServicesUpdate(apitools_base_cli.NewCmd):
-  """Command wrapping services.Update."""
-
-  usage = """services_update <serviceName>"""
-
-  def __init__(self, name, fv):
-    super(ServicesUpdate, self).__init__(name, fv)
-    flags.DEFINE_string(
-        'managedService',
-        None,
-        u'A ManagedService resource to be passed as the request body.',
-        flag_values=fv)
-    flags.DEFINE_string(
-        'updateMask',
-        None,
-        u'A mask specifying which fields to update. Update mask has been '
-        u'deprecated on UpdateService service method. Please use PatchService'
-        u' method instead to do partial updates.',
-        flag_values=fv)
-
-  def RunWithArgs(self, serviceName):
-    """Updates the configuration of a service.  If the specified service does
-    not already exist, then it is created.  Operation<response:
-    ManagedService>
-
-    Args:
-      serviceName: The name of the service.  See the `ServiceManager` overview
-        for naming requirements.  For example: `example.googleapis.com`.
-
-    Flags:
-      managedService: A ManagedService resource to be passed as the request
-        body.
-      updateMask: A mask specifying which fields to update. Update mask has
-        been deprecated on UpdateService service method. Please use
-        PatchService method instead to do partial updates.
-    """
-    client = GetClientFromFlags()
-    global_params = GetGlobalParamsFromFlags()
-    request = messages.ServicemanagementServicesUpdateRequest(
-        serviceName=serviceName.decode('utf8'),
-        )
-    if FLAGS['managedService'].present:
-      request.managedService = apitools_base.JsonToMessage(messages.ManagedService, FLAGS.managedService)
-    if FLAGS['updateMask'].present:
-      request.updateMask = FLAGS.updateMask.decode('utf8')
-    result = client.services.Update(
-        request, global_params=global_params)
-    print apitools_base_cli.FormatOutput(result)
-
-
-class ServicesUpdateAccessPolicy(apitools_base_cli.NewCmd):
-  """Command wrapping services.UpdateAccessPolicy."""
-
-  usage = """services_updateAccessPolicy <serviceName>"""
-
-  def __init__(self, name, fv):
-    super(ServicesUpdateAccessPolicy, self).__init__(name, fv)
-    flags.DEFINE_string(
-        'accessList',
-        None,
-        u'ACL for access to the unrestricted surface of the service.',
-        flag_values=fv)
-    flags.DEFINE_string(
-        'visibilityLabelAccessLists',
-        None,
-        u'ACLs for access to restricted parts of the service.  The map key is'
-        u' the visibility label that is being controlled.  Note that access '
-        u'to any label also implies access to the unrestricted surface.',
-        flag_values=fv)
-
-  def RunWithArgs(self, serviceName):
-    """Producer method to update the current policy.  This method will return
-    an error if the policy is too large (more than 50 entries across all
-    lists).
-
-    Args:
-      serviceName: The service protected by this policy.
-
-    Flags:
-      accessList: ACL for access to the unrestricted surface of the service.
-      visibilityLabelAccessLists: ACLs for access to restricted parts of the
-        service.  The map key is the visibility label that is being
-        controlled.  Note that access to any label also implies access to the
-        unrestricted surface.
-    """
-    client = GetClientFromFlags()
-    global_params = GetGlobalParamsFromFlags()
-    request = messages.ServiceAccessPolicy(
-        serviceName=serviceName.decode('utf8'),
-        )
-    if FLAGS['accessList'].present:
-      request.accessList = apitools_base.JsonToMessage(messages.ServiceAccessList, FLAGS.accessList)
-    if FLAGS['visibilityLabelAccessLists'].present:
-      request.visibilityLabelAccessLists = apitools_base.JsonToMessage(messages.ServiceAccessPolicy.VisibilityLabelAccessListsValue, FLAGS.visibilityLabelAccessLists)
-    result = client.services.UpdateAccessPolicy(
-        request, global_params=global_params)
-    print apitools_base_cli.FormatOutput(result)
-
-
-class ServicesUpdateConfig(apitools_base_cli.NewCmd):
-  """Command wrapping services.UpdateConfig."""
-
-  usage = """services_updateConfig <serviceName>"""
-
-  def __init__(self, name, fv):
-    super(ServicesUpdateConfig, self).__init__(name, fv)
-    flags.DEFINE_string(
-        'service',
-        None,
-        u'A Service resource to be passed as the request body.',
-        flag_values=fv)
-    flags.DEFINE_string(
-        'updateMask',
-        None,
-        u'A mask specifying which fields to update. Update mask has been '
-        u'deprecated on UpdateServiceConfig service method. Please use '
-        u'PatchServiceConfig method instead to do partial updates.',
-        flag_values=fv)
-
-  def RunWithArgs(self, serviceName):
-    """Updates the specified subset of the service resource. Equivalent to
-    calling `UpdateService` with only the `service_config` field updated.
-    Operation<response: google.api.Service>
-
-    Args:
-      serviceName: The name of the service.  See the `ServiceManager` overview
-        for naming requirements.  For example: `example.googleapis.com`.
-
-    Flags:
-      service: A Service resource to be passed as the request body.
-      updateMask: A mask specifying which fields to update. Update mask has
-        been deprecated on UpdateServiceConfig service method. Please use
-        PatchServiceConfig method instead to do partial updates.
-    """
-    client = GetClientFromFlags()
-    global_params = GetGlobalParamsFromFlags()
-    request = messages.ServicemanagementServicesUpdateConfigRequest(
-        serviceName=serviceName.decode('utf8'),
-        )
-    if FLAGS['service'].present:
-      request.service = apitools_base.JsonToMessage(messages.Service, FLAGS.service)
-    if FLAGS['updateMask'].present:
-      request.updateMask = FLAGS.updateMask.decode('utf8')
-    result = client.services.UpdateConfig(
-        request, global_params=global_params)
-    print apitools_base_cli.FormatOutput(result)
-
-
-class ServicesAccessPolicyQuery(apitools_base_cli.NewCmd):
-  """Command wrapping services_accessPolicy.Query."""
-
-  usage = """services_accessPolicy_query <serviceName>"""
-
-  def __init__(self, name, fv):
-    super(ServicesAccessPolicyQuery, self).__init__(name, fv)
-    flags.DEFINE_string(
-        'userEmail',
-        None,
-        u'The user to query access for.',
-        flag_values=fv)
-
-  def RunWithArgs(self, serviceName):
-    """Method to query the accessibility of a service and any associated
-    visibility labels for a specified user.  Members of the producer project
-    may call this method and specify any user.  Any user may call this method,
-    but must specify their own email address. In this case the method will
-    return NOT_FOUND if the user has no access to the service.
-
-    Args:
-      serviceName: The service to query access for.
-
-    Flags:
-      userEmail: The user to query access for.
-    """
-    client = GetClientFromFlags()
-    global_params = GetGlobalParamsFromFlags()
-    request = messages.ServicemanagementServicesAccessPolicyQueryRequest(
-        serviceName=serviceName.decode('utf8'),
-        )
-    if FLAGS['userEmail'].present:
-      request.userEmail = FLAGS.userEmail.decode('utf8')
-    result = client.services_accessPolicy.Query(
-        request, global_params=global_params)
-    print apitools_base_cli.FormatOutput(result)
-
-
-class ServicesConfigsCreate(apitools_base_cli.NewCmd):
-  """Command wrapping services_configs.Create."""
-
-  usage = """services_configs_create <serviceName>"""
-
-  def __init__(self, name, fv):
-    super(ServicesConfigsCreate, self).__init__(name, fv)
-    flags.DEFINE_string(
-        'service',
-        None,
-        u'A Service resource to be passed as the request body.',
-        flag_values=fv)
-
-  def RunWithArgs(self, serviceName):
-    """Creates a new service config (version) for a managed service. This
-    method only stores the service config, but does not apply the service
-    config to any backend services.
-
-    Args:
-      serviceName: The name of the service.  See the `ServiceManager` overview
-        for naming requirements.  For example: `example.googleapis.com`.
-
-    Flags:
-      service: A Service resource to be passed as the request body.
-    """
-    client = GetClientFromFlags()
-    global_params = GetGlobalParamsFromFlags()
-    request = messages.ServicemanagementServicesConfigsCreateRequest(
-        serviceName=serviceName.decode('utf8'),
-        )
-    if FLAGS['service'].present:
-      request.service = apitools_base.JsonToMessage(messages.Service, FLAGS.service)
-    result = client.services_configs.Create(
-        request, global_params=global_params)
-    print apitools_base_cli.FormatOutput(result)
-
-
-class ServicesConfigsGet(apitools_base_cli.NewCmd):
-  """Command wrapping services_configs.Get."""
-
-  usage = """services_configs_get <serviceName> <configId>"""
-
-  def __init__(self, name, fv):
-    super(ServicesConfigsGet, self).__init__(name, fv)
-
-  def RunWithArgs(self, serviceName, configId):
-    """Gets a service config (version) for a managed service. If `config_id`
-    is not specified, the latest service config will be returned.
-
-    Args:
-      serviceName: The name of the service.  See the `ServiceManager` overview
-        for naming requirements.  For example: `example.googleapis.com`.
-      configId: The id of the service config resource. Optional. If it is not
-        specified, the latest version of config will be returned.
-    """
-    client = GetClientFromFlags()
-    global_params = GetGlobalParamsFromFlags()
-    request = messages.ServicemanagementServicesConfigsGetRequest(
-        serviceName=serviceName.decode('utf8'),
-        configId=configId.decode('utf8'),
-        )
-    result = client.services_configs.Get(
-        request, global_params=global_params)
-    print apitools_base_cli.FormatOutput(result)
-
-
-class ServicesConfigsList(apitools_base_cli.NewCmd):
-  """Command wrapping services_configs.List."""
-
-  usage = """services_configs_list <serviceName>"""
-
-  def __init__(self, name, fv):
-    super(ServicesConfigsList, self).__init__(name, fv)
-    flags.DEFINE_integer(
-        'pageSize',
-        None,
-        u'The max number of items to include in the response list.',
-        flag_values=fv)
-    flags.DEFINE_string(
-        'pageToken',
-        None,
-        u'The token of the page to retrieve.',
-        flag_values=fv)
-
-  def RunWithArgs(self, serviceName):
-    """Lists the history of the service config for a managed service, from the
-    newest to the oldest.
-
-    Args:
-      serviceName: The name of the service.  See the `ServiceManager` overview
-        for naming requirements.  For example: `example.googleapis.com`.
-
-    Flags:
-      pageSize: The max number of items to include in the response list.
-      pageToken: The token of the page to retrieve.
-    """
-    client = GetClientFromFlags()
-    global_params = GetGlobalParamsFromFlags()
-    request = messages.ServicemanagementServicesConfigsListRequest(
-        serviceName=serviceName.decode('utf8'),
-        )
-    if FLAGS['pageSize'].present:
-      request.pageSize = FLAGS.pageSize
-    if FLAGS['pageToken'].present:
-      request.pageToken = FLAGS.pageToken.decode('utf8')
-    result = client.services_configs.List(
-        request, global_params=global_params)
-    print apitools_base_cli.FormatOutput(result)
-
-
-class ServicesConfigsSubmit(apitools_base_cli.NewCmd):
-  """Command wrapping services_configs.Submit."""
-
-  usage = """services_configs_submit <serviceName>"""
-
-  def __init__(self, name, fv):
-    super(ServicesConfigsSubmit, self).__init__(name, fv)
-    flags.DEFINE_string(
-        'submitConfigSourceRequest',
-        None,
-        u'A SubmitConfigSourceRequest resource to be passed as the request '
-        u'body.',
-        flag_values=fv)
-
-  def RunWithArgs(self, serviceName):
-    """Creates a new service config (version) for a managed service based on
-    user-supplied configuration sources files (for example: OpenAPI
-    Specification). This method stores the source configurations as well as
-    the generated service config. It does NOT apply the service config to any
-    backend services.  Operation<response: SubmitConfigSourceResponse>
-
-    Args:
-      serviceName: The name of the service.  See the `ServiceManager` overview
-        for naming requirements.  For example: `example.googleapis.com`.
-
-    Flags:
-      submitConfigSourceRequest: A SubmitConfigSourceRequest resource to be
-        passed as the request body.
-    """
-    client = GetClientFromFlags()
-    global_params = GetGlobalParamsFromFlags()
-    request = messages.ServicemanagementServicesConfigsSubmitRequest(
-        serviceName=serviceName.decode('utf8'),
-        )
-    if FLAGS['submitConfigSourceRequest'].present:
-      request.submitConfigSourceRequest = apitools_base.JsonToMessage(messages.SubmitConfigSourceRequest, FLAGS.submitConfigSourceRequest)
-    result = client.services_configs.Submit(
-        request, global_params=global_params)
-    print apitools_base_cli.FormatOutput(result)
-
-
-class ServicesCustomerSettingsGet(apitools_base_cli.NewCmd):
-  """Command wrapping services_customerSettings.Get."""
-
-  usage = """services_customerSettings_get <serviceName> <customerId>"""
-
-  def __init__(self, name, fv):
-    super(ServicesCustomerSettingsGet, self).__init__(name, fv)
-    flags.DEFINE_string(
-        'expand',
-        None,
-        u'Fields to expand in any results.',
-        flag_values=fv)
-    flags.DEFINE_enum(
-        'view',
-        u'PROJECT_SETTINGS_VIEW_UNSPECIFIED',
-        [u'PROJECT_SETTINGS_VIEW_UNSPECIFIED', u'CONSUMER_VIEW', u'PRODUCER_VIEW', u'ALL'],
-        u'Request only fields for the specified view.',
-        flag_values=fv)
-
-  def RunWithArgs(self, serviceName, customerId):
-    """Retrieves the settings that control the specified customer's usage of
-    the service.
-
-    Args:
-      serviceName: The name of the service.  See the `ServiceManager` overview
-        for naming requirements.  For example: `example.googleapis.com`. This
-        field is required.
-      customerId: ID for the customer. See the comment for
-        `CustomerSettings.customer_id` field of message for its format. This
-        field is required.
-
-    Flags:
-      expand: Fields to expand in any results.
-      view: Request only fields for the specified view.
-    """
-    client = GetClientFromFlags()
-    global_params = GetGlobalParamsFromFlags()
-    request = messages.ServicemanagementServicesCustomerSettingsGetRequest(
-        serviceName=serviceName.decode('utf8'),
-        customerId=customerId.decode('utf8'),
-        )
-    if FLAGS['expand'].present:
-      request.expand = FLAGS.expand.decode('utf8')
-    if FLAGS['view'].present:
-      request.view = messages.ServicemanagementServicesCustomerSettingsGetRequest.ViewValueValuesEnum(FLAGS.view)
-    result = client.services_customerSettings.Get(
-        request, global_params=global_params)
-    print apitools_base_cli.FormatOutput(result)
-
-
-class ServicesCustomerSettingsPatch(apitools_base_cli.NewCmd):
-  """Command wrapping services_customerSettings.Patch."""
-
-  usage = """services_customerSettings_patch <serviceName> <customerId>"""
-
-  def __init__(self, name, fv):
-    super(ServicesCustomerSettingsPatch, self).__init__(name, fv)
-    flags.DEFINE_string(
-        'customerSettings',
-        None,
-        u'A CustomerSettings resource to be passed as the request body.',
-        flag_values=fv)
-    flags.DEFINE_string(
-        'updateMask',
-        None,
-        u'The field mask specifying which fields are to be updated.',
-        flag_values=fv)
-
-  def RunWithArgs(self, serviceName, customerId):
-    """Updates specified subset of the settings that control the specified
-    customer's usage of the service.  Attempts to update a field not
-    controlled by the caller will result in an access denied error.
-    Operation<response: CustomerSettings>
-
-    Args:
-      serviceName: The name of the service.  See the `ServiceManager` overview
-        for naming requirements.  For example: `example.googleapis.com`. This
-        field is required.
-      customerId: ID for the customer. See the comment for
-        `CustomerSettings.customer_id` field of message for its format. This
-        field is required.
-
-    Flags:
-      customerSettings: A CustomerSettings resource to be passed as the
-        request body.
-      updateMask: The field mask specifying which fields are to be updated.
-    """
-    client = GetClientFromFlags()
-    global_params = GetGlobalParamsFromFlags()
-    request = messages.ServicemanagementServicesCustomerSettingsPatchRequest(
-        serviceName=serviceName.decode('utf8'),
-        customerId=customerId.decode('utf8'),
-        )
-    if FLAGS['customerSettings'].present:
-      request.customerSettings = apitools_base.JsonToMessage(messages.CustomerSettings, FLAGS.customerSettings)
-    if FLAGS['updateMask'].present:
-      request.updateMask = FLAGS.updateMask.decode('utf8')
-    result = client.services_customerSettings.Patch(
-        request, global_params=global_params)
-    print apitools_base_cli.FormatOutput(result)
-
-
-class ServicesProjectSettingsGet(apitools_base_cli.NewCmd):
-  """Command wrapping services_projectSettings.Get."""
-
-  usage = """services_projectSettings_get <serviceName> <consumerProjectId>"""
-
-  def __init__(self, name, fv):
-    super(ServicesProjectSettingsGet, self).__init__(name, fv)
-    flags.DEFINE_string(
-        'expand',
-        None,
-        u'Fields to expand in any results.  By default, the following fields '
-        u'are not present in the result: - `operations` - `quota_usage`',
-        flag_values=fv)
-    flags.DEFINE_enum(
-        'view',
-        u'PROJECT_SETTINGS_VIEW_UNSPECIFIED',
-        [u'PROJECT_SETTINGS_VIEW_UNSPECIFIED', u'CONSUMER_VIEW', u'PRODUCER_VIEW', u'ALL'],
-        u'Request only the fields for the specified view.',
-        flag_values=fv)
-
-  def RunWithArgs(self, serviceName, consumerProjectId):
-    """Retrieves the settings that control the specified consumer project's
-    usage of the service.
-
-    Args:
-      serviceName: The name of the service.  See the `ServiceManager` overview
-        for naming requirements.  For example: `example.googleapis.com`.
-      consumerProjectId: The project ID of the consumer.
-
-    Flags:
-      expand: Fields to expand in any results.  By default, the following
-        fields are not present in the result: - `operations` - `quota_usage`
-      view: Request only the fields for the specified view.
-    """
-    client = GetClientFromFlags()
-    global_params = GetGlobalParamsFromFlags()
-    request = messages.ServicemanagementServicesProjectSettingsGetRequest(
-        serviceName=serviceName.decode('utf8'),
-        consumerProjectId=consumerProjectId.decode('utf8'),
-        )
-    if FLAGS['expand'].present:
-      request.expand = FLAGS.expand.decode('utf8')
-    if FLAGS['view'].present:
-      request.view = messages.ServicemanagementServicesProjectSettingsGetRequest.ViewValueValuesEnum(FLAGS.view)
-    result = client.services_projectSettings.Get(
-        request, global_params=global_params)
-    print apitools_base_cli.FormatOutput(result)
-
-
-class ServicesProjectSettingsPatch(apitools_base_cli.NewCmd):
-  """Command wrapping services_projectSettings.Patch."""
-
-  usage = """services_projectSettings_patch <serviceName> <consumerProjectId>"""
-
-  def __init__(self, name, fv):
-    super(ServicesProjectSettingsPatch, self).__init__(name, fv)
-    flags.DEFINE_string(
-        'projectSettings',
-        None,
-        u'A ProjectSettings resource to be passed as the request body.',
-        flag_values=fv)
-    flags.DEFINE_string(
-        'updateMask',
-        None,
-        u'The field mask specifying which fields are to be updated.',
-        flag_values=fv)
-
-  def RunWithArgs(self, serviceName, consumerProjectId):
-    """Updates specified subset of the settings that control the specified
-    consumer project's usage of the service.  Attempts to update a field not
-    controlled by the caller will result in an access denied error.
-    Operation<response: ProjectSettings>
-
-    Args:
-      serviceName: The name of the service.  See the `ServiceManager` overview
-        for naming requirements.  For example: `example.googleapis.com`.
-      consumerProjectId: The project ID of the consumer.
-
-    Flags:
-      projectSettings: A ProjectSettings resource to be passed as the request
-        body.
-      updateMask: The field mask specifying which fields are to be updated.
-    """
-    client = GetClientFromFlags()
-    global_params = GetGlobalParamsFromFlags()
-    request = messages.ServicemanagementServicesProjectSettingsPatchRequest(
-        serviceName=serviceName.decode('utf8'),
-        consumerProjectId=consumerProjectId.decode('utf8'),
-        )
-    if FLAGS['projectSettings'].present:
-      request.projectSettings = apitools_base.JsonToMessage(messages.ProjectSettings, FLAGS.projectSettings)
-    if FLAGS['updateMask'].present:
-      request.updateMask = FLAGS.updateMask.decode('utf8')
-    result = client.services_projectSettings.Patch(
-        request, global_params=global_params)
-    print apitools_base_cli.FormatOutput(result)
-
-
-class ServicesProjectSettingsUpdate(apitools_base_cli.NewCmd):
-  """Command wrapping services_projectSettings.Update."""
-
-  usage = """services_projectSettings_update <serviceName> <consumerProjectId>"""
-
-  def __init__(self, name, fv):
-    super(ServicesProjectSettingsUpdate, self).__init__(name, fv)
-    flags.DEFINE_string(
-        'operations',
-        None,
-        u'Read-only view of pending operations affecting this resource, if '
-        u'requested.',
-        flag_values=fv)
-    flags.DEFINE_string(
-        'properties',
-        None,
-        u'Service-defined per-consumer properties.  A key-value mapping a '
-        u'string key to a google.protobuf.ListValue proto. Values in the list'
-        u" are typed as defined in the Service configuration's "
-        u'consumer.properties field.',
-        flag_values=fv)
-    flags.DEFINE_string(
-        'quotaSettings',
-        None,
-        u'Settings that control how much or how fast the service can be used '
-        u'by the consumer project.',
-        flag_values=fv)
-    flags.DEFINE_string(
-        'usageSettings',
-        None,
-        u'Settings that control whether this service is usable by the '
-        u'consumer project.',
-        flag_values=fv)
-    flags.DEFINE_string(
-        'visibilitySettings',
-        None,
-        u'Settings that control which features of the service are visible to '
-        u'the consumer project.',
-        flag_values=fv)
-
-  def RunWithArgs(self, serviceName, consumerProjectId):
-    """NOTE: Currently unsupported.  Use PatchProjectSettings instead.
-    Updates the settings that control the specified consumer project's usage
-    of the service.  Attempts to update a field not controlled by the caller
-    will result in an access denied error.  Operation<response:
-    ProjectSettings>
-
-    Args:
-      serviceName: The name of the service.  See the `ServiceManager` overview
-        for naming requirements.
-      consumerProjectId: ID for the project consuming this service.
-
-    Flags:
-      operations: Read-only view of pending operations affecting this
-        resource, if requested.
-      properties: Service-defined per-consumer properties.  A key-value
-        mapping a string key to a google.protobuf.ListValue proto. Values in
-        the list are typed as defined in the Service configuration's
-        consumer.properties field.
-      quotaSettings: Settings that control how much or how fast the service
-        can be used by the consumer project.
-      usageSettings: Settings that control whether this service is usable by
-        the consumer project.
-      visibilitySettings: Settings that control which features of the service
-        are visible to the consumer project.
-    """
-    client = GetClientFromFlags()
-    global_params = GetGlobalParamsFromFlags()
-    request = messages.ProjectSettings(
-        serviceName=serviceName.decode('utf8'),
-        consumerProjectId=consumerProjectId.decode('utf8'),
-        )
-    if FLAGS['operations'].present:
-      request.operations = [apitools_base.JsonToMessage(messages.Operation, x) for x in FLAGS.operations]
-    if FLAGS['properties'].present:
-      request.properties = apitools_base.JsonToMessage(messages.ProjectSettings.PropertiesValue, FLAGS.properties)
-    if FLAGS['quotaSettings'].present:
-      request.quotaSettings = apitools_base.JsonToMessage(messages.QuotaSettings, FLAGS.quotaSettings)
-    if FLAGS['usageSettings'].present:
-      request.usageSettings = apitools_base.JsonToMessage(messages.UsageSettings, FLAGS.usageSettings)
-    if FLAGS['visibilitySettings'].present:
-      request.visibilitySettings = apitools_base.JsonToMessage(messages.VisibilitySettings, FLAGS.visibilitySettings)
-    result = client.services_projectSettings.Update(
-        request, global_params=global_params)
-    print apitools_base_cli.FormatOutput(result)
-
-
-class ConvertConfig(apitools_base_cli.NewCmd):
-  """Command wrapping v1.ConvertConfig."""
-
-  usage = """convertConfig"""
-
-  def __init__(self, name, fv):
-    super(ConvertConfig, self).__init__(name, fv)
-    flags.DEFINE_string(
-        'configSpec',
-        None,
-        u'Input configuration For this version of API, the supported type is '
-        u'OpenApiSpec',
-        flag_values=fv)
-    flags.DEFINE_string(
-        'openApiSpec',
-        None,
-        u'The OpenAPI specification for an API.',
-        flag_values=fv)
-    flags.DEFINE_string(
-        'serviceName',
-        None,
-        u'The service name to use for constructing the normalized service '
-        u'configuration equivalent of the provided configuration '
-        u'specification.',
-        flag_values=fv)
-    flags.DEFINE_string(
-        'swaggerSpec',
-        None,
-        u'The swagger specification for an API.',
-        flag_values=fv)
-
-  def RunWithArgs(self):
-    """DEPRECATED. `SubmitConfigSource` with `validate_only=true` will provide
-    config conversion moving forward.  Converts an API specification (e.g.
-    Swagger spec) to an equivalent `google.api.Service`.
-
-    Flags:
-      configSpec: Input configuration For this version of API, the supported
-        type is OpenApiSpec
-      openApiSpec: The OpenAPI specification for an API.
-      serviceName: The service name to use for constructing the normalized
-        service configuration equivalent of the provided configuration
-        specification.
-      swaggerSpec: The swagger specification for an API.
-    """
-    client = GetClientFromFlags()
-    global_params = GetGlobalParamsFromFlags()
-    request = messages.ConvertConfigRequest(
-        )
-    if FLAGS['configSpec'].present:
-      request.configSpec = apitools_base.JsonToMessage(messages.ConvertConfigRequest.ConfigSpecValue, FLAGS.configSpec)
-    if FLAGS['openApiSpec'].present:
-      request.openApiSpec = apitools_base.JsonToMessage(messages.OpenApiSpec, FLAGS.openApiSpec)
-    if FLAGS['serviceName'].present:
-      request.serviceName = FLAGS.serviceName.decode('utf8')
-    if FLAGS['swaggerSpec'].present:
-      request.swaggerSpec = apitools_base.JsonToMessage(messages.SwaggerSpec, FLAGS.swaggerSpec)
-    result = client.v1.ConvertConfig(
-        request, global_params=global_params)
-    print apitools_base_cli.FormatOutput(result)
-
-
-def main(_):
-  appcommands.AddCmd('pyshell', PyShell)
-  appcommands.AddCmd('operations_get', OperationsGet)
-  appcommands.AddCmd('services_convertConfig', ServicesConvertConfig)
-  appcommands.AddCmd('services_create', ServicesCreate)
-  appcommands.AddCmd('services_delete', ServicesDelete)
-  appcommands.AddCmd('services_disable', ServicesDisable)
-  appcommands.AddCmd('services_enable', ServicesEnable)
-  appcommands.AddCmd('services_get', ServicesGet)
-  appcommands.AddCmd('services_getAccessPolicy', ServicesGetAccessPolicy)
-  appcommands.AddCmd('services_getConfig', ServicesGetConfig)
-  appcommands.AddCmd('services_list', ServicesList)
-  appcommands.AddCmd('services_patch', ServicesPatch)
-  appcommands.AddCmd('services_patchConfig', ServicesPatchConfig)
-  appcommands.AddCmd('services_update', ServicesUpdate)
-  appcommands.AddCmd('services_updateAccessPolicy', ServicesUpdateAccessPolicy)
-  appcommands.AddCmd('services_updateConfig', ServicesUpdateConfig)
-  appcommands.AddCmd('services_accessPolicy_query', ServicesAccessPolicyQuery)
-  appcommands.AddCmd('services_configs_create', ServicesConfigsCreate)
-  appcommands.AddCmd('services_configs_get', ServicesConfigsGet)
-  appcommands.AddCmd('services_configs_list', ServicesConfigsList)
-  appcommands.AddCmd('services_configs_submit', ServicesConfigsSubmit)
-  appcommands.AddCmd('services_customerSettings_get', ServicesCustomerSettingsGet)
-  appcommands.AddCmd('services_customerSettings_patch', ServicesCustomerSettingsPatch)
-  appcommands.AddCmd('services_projectSettings_get', ServicesProjectSettingsGet)
-  appcommands.AddCmd('services_projectSettings_patch', ServicesProjectSettingsPatch)
-  appcommands.AddCmd('services_projectSettings_update', ServicesProjectSettingsUpdate)
-  appcommands.AddCmd('convertConfig', ConvertConfig)
-
-  apitools_base_cli.SetupLogger()
-  if hasattr(appcommands, 'SetDefaultCommand'):
-    appcommands.SetDefaultCommand('pyshell')
-
-
-run_main = apitools_base_cli.run_main
-
-if __name__ == '__main__':
-  appcommands.Run()
diff --git a/samples/servicemanagement_sample/servicemanagement_v1/servicemanagement_v1_client.py b/samples/servicemanagement_sample/servicemanagement_v1/servicemanagement_v1_client.py
index 26291bc..a72936e 100644
--- a/samples/servicemanagement_sample/servicemanagement_v1/servicemanagement_v1_client.py
+++ b/samples/servicemanagement_sample/servicemanagement_v1/servicemanagement_v1_client.py
@@ -24,7 +24,7 @@
                get_credentials=True, http=None, model=None,
                log_request=False, log_response=False,
                credentials_args=None, default_global_params=None,
-               additional_http_headers=None):
+               additional_http_headers=None, response_encoding=None):
     """Create a new servicemanagement handle."""
     url = url or self.BASE_URL
     super(ServicemanagementV1, self).__init__(
@@ -33,7 +33,8 @@
         log_request=log_request, log_response=log_response,
         credentials_args=credentials_args,
         default_global_params=default_global_params,
-        additional_http_headers=additional_http_headers)
+        additional_http_headers=additional_http_headers,
+        response_encoding=response_encoding)
     self.operations = self.OperationsService(self)
     self.services_accessPolicy = self.ServicesAccessPolicyService(self)
     self.services_configs = self.ServicesConfigsService(self)
@@ -53,7 +54,7 @@
           }
 
     def Get(self, request, global_params=None):
-      """Gets the latest state of a long-running operation.  Clients can use this.
+      r"""Gets the latest state of a long-running operation.  Clients can use this.
 method to poll the operation result at intervals as recommended by the API
 service.
 
@@ -91,7 +92,7 @@
           }
 
     def Query(self, request, global_params=None):
-      """Method to query the accessibility of a service and any associated.
+      r"""Method to query the accessibility of a service and any associated.
 visibility labels for a specified user.
 
 Members of the producer project may call this method and specify any user.
@@ -134,7 +135,7 @@
           }
 
     def Create(self, request, global_params=None):
-      """Creates a new service config (version) for a managed service. This method.
+      r"""Creates a new service config (version) for a managed service. This method.
 only stores the service config, but does not apply the service config to
 any backend services.
 
@@ -162,7 +163,7 @@
     )
 
     def Get(self, request, global_params=None):
-      """Gets a service config (version) for a managed service. If `config_id` is.
+      r"""Gets a service config (version) for a managed service. If `config_id` is.
 not specified, the latest service config will be returned.
 
       Args:
@@ -189,7 +190,7 @@
     )
 
     def List(self, request, global_params=None):
-      """Lists the history of the service config for a managed service,.
+      r"""Lists the history of the service config for a managed service,.
 from the newest to the oldest.
 
       Args:
@@ -216,7 +217,7 @@
     )
 
     def Submit(self, request, global_params=None):
-      """Creates a new service config (version) for a managed service based on.
+      r"""Creates a new service config (version) for a managed service based on.
 user-supplied configuration sources files (for example: OpenAPI
 Specification). This method stores the source configurations as well as the
 generated service config. It does NOT apply the service config to any
@@ -258,7 +259,7 @@
           }
 
     def Get(self, request, global_params=None):
-      """Retrieves the settings that control the specified customer's usage of the.
+      r"""Retrieves the settings that control the specified customer's usage of the.
 service.
 
       Args:
@@ -285,7 +286,7 @@
     )
 
     def Patch(self, request, global_params=None):
-      """Updates specified subset of the settings that control the specified.
+      r"""Updates specified subset of the settings that control the specified.
 customer's usage of the service.  Attempts to update a field not
 controlled by the caller will result in an access denied error.
 
@@ -325,7 +326,7 @@
           }
 
     def Get(self, request, global_params=None):
-      """Retrieves the settings that control the specified consumer project's usage.
+      r"""Retrieves the settings that control the specified consumer project's usage.
 of the service.
 
       Args:
@@ -352,7 +353,7 @@
     )
 
     def Patch(self, request, global_params=None):
-      """Updates specified subset of the settings that control the specified.
+      r"""Updates specified subset of the settings that control the specified.
 consumer project's usage of the service.  Attempts to update a field not
 controlled by the caller will result in an access denied error.
 
@@ -382,7 +383,7 @@
     )
 
     def Update(self, request, global_params=None):
-      """NOTE: Currently unsupported.  Use PatchProjectSettings instead.
+      r"""NOTE: Currently unsupported.  Use PatchProjectSettings instead.
 
 Updates the settings that control the specified consumer project's usage
 of the service.  Attempts to update a field not controlled by the caller
@@ -424,7 +425,7 @@
           }
 
     def ConvertConfig(self, request, global_params=None):
-      """DEPRECATED. `SubmitConfigSource` with `validate_only=true` will provide.
+      r"""DEPRECATED. `SubmitConfigSource` with `validate_only=true` will provide.
 config conversion moving forward.
 
 Converts an API specification (e.g. Swagger spec) to an
@@ -454,7 +455,7 @@
     )
 
     def Create(self, request, global_params=None):
-      """Creates a new managed service.
+      r"""Creates a new managed service.
 
 Operation<response: ManagedService>
 
@@ -482,7 +483,7 @@
     )
 
     def Delete(self, request, global_params=None):
-      """Deletes a managed service.
+      r"""Deletes a managed service.
 
 Operation<response: google.protobuf.Empty>
 
@@ -510,7 +511,7 @@
     )
 
     def Disable(self, request, global_params=None):
-      """Disable a managed service for a project.
+      r"""Disable a managed service for a project.
 Google Service Management will only disable the managed service even if
 there are other services depend on the managed service.
 
@@ -540,7 +541,7 @@
     )
 
     def Enable(self, request, global_params=None):
-      """Enable a managed service for a project with default setting.
+      r"""Enable a managed service for a project with default setting.
 If the managed service has dependencies, they will be enabled as well.
 
 Operation<response: EnableServiceResponse>
@@ -569,7 +570,7 @@
     )
 
     def Get(self, request, global_params=None):
-      """Gets a managed service. If the `consumer_project_id` is specified,.
+      r"""Gets a managed service. If the `consumer_project_id` is specified,.
 the project's settings for the specified service are also returned.
 
       Args:
@@ -596,7 +597,7 @@
     )
 
     def GetAccessPolicy(self, request, global_params=None):
-      """Producer method to retrieve current policy.
+      r"""Producer method to retrieve current policy.
 
       Args:
         request: (ServicemanagementServicesGetAccessPolicyRequest) input message
@@ -622,7 +623,7 @@
     )
 
     def GetConfig(self, request, global_params=None):
-      """Gets a service config (version) for a managed service. If `config_id` is.
+      r"""Gets a service config (version) for a managed service. If `config_id` is.
 not specified, the latest service config will be returned.
 
       Args:
@@ -649,7 +650,7 @@
     )
 
     def List(self, request, global_params=None):
-      """Lists all managed services. If the `consumer_project_id` is specified,.
+      r"""Lists all managed services. If the `consumer_project_id` is specified,.
 the project's settings for the specified service are also returned.
 
       Args:
@@ -676,7 +677,7 @@
     )
 
     def Patch(self, request, global_params=None):
-      """Updates the specified subset of the configuration. If the specified service.
+      r"""Updates the specified subset of the configuration. If the specified service.
 does not exists the patch operation fails.
 
 Operation<response: ManagedService>
@@ -705,7 +706,7 @@
     )
 
     def PatchConfig(self, request, global_params=None):
-      """Updates the specified subset of the service resource. Equivalent to.
+      r"""Updates the specified subset of the service resource. Equivalent to.
 calling `PatchService` with only the `service_config` field updated.
 
 Operation<response: google.api.Service>
@@ -734,7 +735,7 @@
     )
 
     def Update(self, request, global_params=None):
-      """Updates the configuration of a service.  If the specified service does not.
+      r"""Updates the configuration of a service.  If the specified service does not.
 already exist, then it is created.
 
 Operation<response: ManagedService>
@@ -763,7 +764,7 @@
     )
 
     def UpdateAccessPolicy(self, request, global_params=None):
-      """Producer method to update the current policy.  This method will return an.
+      r"""Producer method to update the current policy.  This method will return an.
 error if the policy is too large (more than 50 entries across all lists).
 
       Args:
@@ -790,7 +791,7 @@
     )
 
     def UpdateConfig(self, request, global_params=None):
-      """Updates the specified subset of the service resource. Equivalent to.
+      r"""Updates the specified subset of the service resource. Equivalent to.
 calling `UpdateService` with only the `service_config` field updated.
 
 Operation<response: google.api.Service>
@@ -829,7 +830,7 @@
           }
 
     def ConvertConfig(self, request, global_params=None):
-      """DEPRECATED. `SubmitConfigSource` with `validate_only=true` will provide.
+      r"""DEPRECATED. `SubmitConfigSource` with `validate_only=true` will provide.
 config conversion moving forward.
 
 Converts an API specification (e.g. Swagger spec) to an
diff --git a/samples/servicemanagement_sample/servicemanagement_v1/servicemanagement_v1_messages.py b/samples/servicemanagement_sample/servicemanagement_v1/servicemanagement_v1_messages.py
index 9291cf3..65b660c 100644
--- a/samples/servicemanagement_sample/servicemanagement_v1/servicemanagement_v1_messages.py
+++ b/samples/servicemanagement_sample/servicemanagement_v1/servicemanagement_v1_messages.py
@@ -13,7 +13,7 @@
 
 
 class Api(_messages.Message):
-  """Api is a light-weight descriptor for a protocol buffer service.
+  r"""Api is a light-weight descriptor for a protocol buffer service.
 
   Enums:
     SyntaxValueValuesEnum: The source syntax of the service.
@@ -45,7 +45,7 @@
   """
 
   class SyntaxValueValuesEnum(_messages.Enum):
-    """The source syntax of the service.
+    r"""The source syntax of the service.
 
     Values:
       SYNTAX_PROTO2: Syntax `proto2`.
@@ -64,7 +64,7 @@
 
 
 class AreaUnderCurveParams(_messages.Message):
-  """AreaUnderCurveParams groups the metrics relevant to generating duration
+  r"""AreaUnderCurveParams groups the metrics relevant to generating duration
   based metric from base (snapshot) metric and delta (change) metric.  The
   generated metric has two dimensions:    resource usage metric and the
   duration the metric applies.  Essentially the generated metric is the Area
@@ -90,9 +90,9 @@
 
 
 class AuthProvider(_messages.Message):
-  """Configuration for an anthentication provider, including support for [JSON
-  Web Token (JWT)](https://tools.ietf.org/html/draft-ietf-oauth-json-web-
-  token-32).
+  r"""Configuration for an anthentication provider, including support for
+  [JSON Web Token (JWT)](https://tools.ietf.org/html/draft-ietf-oauth-json-
+  web-token-32).
 
   Fields:
     id: The unique identifier of the auth provider. It will be referred to by
@@ -117,8 +117,8 @@
 
 
 class AuthRequirement(_messages.Message):
-  """User-defined authentication requirements, including support for [JSON Web
-  Token (JWT)](https://tools.ietf.org/html/draft-ietf-oauth-json-web-
+  r"""User-defined authentication requirements, including support for [JSON
+  Web Token (JWT)](https://tools.ietf.org/html/draft-ietf-oauth-json-web-
   token-32).
 
   Fields:
@@ -141,7 +141,7 @@
 
 
 class Authentication(_messages.Message):
-  """`Authentication` defines the authentication configuration for an API.
+  r"""`Authentication` defines the authentication configuration for an API.
   Example for an API targeted for external use:      name:
   calendar.googleapis.com     authentication:       rules:       - selector:
   "*"         oauth:           canonical_scopes:
@@ -160,7 +160,7 @@
 
 
 class AuthenticationRule(_messages.Message):
-  """Authentication rules for the service.  By default, if a method has any
+  r"""Authentication rules for the service.  By default, if a method has any
   authentication requirements, every request must include a valid credential
   matching one of the requirements. It's an error to include more than one
   kind of credential in a single request.  If a method doesn't have any auth
@@ -183,7 +183,7 @@
 
 
 class Backend(_messages.Message):
-  """`Backend` defines the backend configuration for a service.
+  r"""`Backend` defines the backend configuration for a service.
 
   Fields:
     rules: A list of backend rules providing configuration for individual API
@@ -194,7 +194,7 @@
 
 
 class BackendRule(_messages.Message):
-  """A backend rule provides configuration for an individual API element.
+  r"""A backend rule provides configuration for an individual API element.
 
   Fields:
     address: The address of the API backend.
@@ -210,7 +210,7 @@
 
 
 class Billing(_messages.Message):
-  """Billing related configuration of the service.  The following example
+  r"""Billing related configuration of the service.  The following example
   shows how to configure metrics for billing:      metrics:     - name:
   library.googleapis.com/read_calls       metric_kind: DELTA       value_type:
   INT64     - name: library.googleapis.com/write_calls       metric_kind:
@@ -245,7 +245,7 @@
 
 
 class BillingStatusRule(_messages.Message):
-  """Defines the billing status requirements for operations.  When used with
+  r"""Defines the billing status requirements for operations.  When used with
   [Service Control API](https://cloud.google.com/service-control/), the
   following statuses are supported:  - **current**: the associated billing
   account is up to date and capable of                paying for resource
@@ -268,7 +268,7 @@
 
 
 class CompositeOperationMetadata(_messages.Message):
-  """Metadata for composite operations.
+  r"""Metadata for composite operations.
 
   Messages:
     OriginalRequestValue: Original request that triggered this operation.
@@ -297,7 +297,7 @@
 
   @encoding.MapUnrecognizedFields('additionalProperties')
   class OriginalRequestValue(_messages.Message):
-    """Original request that triggered this operation.
+    r"""Original request that triggered this operation.
 
     Messages:
       AdditionalProperty: An additional property for a OriginalRequestValue
@@ -309,7 +309,7 @@
     """
 
     class AdditionalProperty(_messages.Message):
-      """An additional property for a OriginalRequestValue object.
+      r"""An additional property for a OriginalRequestValue object.
 
       Fields:
         key: Name of the additional property.
@@ -323,7 +323,7 @@
 
   @encoding.MapUnrecognizedFields('additionalProperties')
   class ResponseFieldMasksValue(_messages.Message):
-    """Defines which part of the response a child operation will contribute.
+    r"""Defines which part of the response a child operation will contribute.
     Each key of the map is the name of a child operation. Each value is a
     field mask that identifies what that child operation contributes to the
     response, for example, "quota_settings", "visiblity_settings", etc.
@@ -338,7 +338,7 @@
     """
 
     class AdditionalProperty(_messages.Message):
-      """An additional property for a ResponseFieldMasksValue object.
+      r"""An additional property for a ResponseFieldMasksValue object.
 
       Fields:
         key: Name of the additional property.
@@ -357,7 +357,7 @@
 
 
 class ConfigFile(_messages.Message):
-  """Generic specification of a source configuration file
+  r"""Generic specification of a source configuration file
 
   Enums:
     FileTypeValueValuesEnum: The kind of configuration file represented. This
@@ -375,7 +375,7 @@
   """
 
   class FileTypeValueValuesEnum(_messages.Enum):
-    """The kind of configuration file represented. This is used to determine
+    r"""The kind of configuration file represented. This is used to determine
     the method for generating `google.api.Service` using this file.
 
     Values:
@@ -402,14 +402,14 @@
 
 
 class ConfigOptions(_messages.Message):
-  """A set of options to cover use of source config within `ServiceManager`
+  r"""A set of options to cover use of source config within `ServiceManager`
   and related tools.
   """
 
 
 
 class ConfigSource(_messages.Message):
-  """Represents a user-specified configuration for a service (as opposed to
+  r"""Represents a user-specified configuration for a service (as opposed to
   the the generated service config form provided by `google.api.Service`).
   This is meant to encode service config as manipulated directly by customers,
   rather than the config form resulting from toolchain generation and
@@ -435,8 +435,8 @@
 
 
 class Context(_messages.Message):
-  """`Context` defines which contexts an API requests.  Example:      context:
-  rules:       - selector: "*"         requested:         -
+  r"""`Context` defines which contexts an API requests.  Example:
+  context:       rules:       - selector: "*"         requested:         -
   google.rpc.context.ProjectContext         - google.rpc.context.OriginContext
   The above specifies that all methods in the API request
   `google.rpc.context.ProjectContext` and `google.rpc.context.OriginContext`.
@@ -450,7 +450,7 @@
 
 
 class ContextRule(_messages.Message):
-  """A context rule provides information about the context for an individual
+  r"""A context rule provides information about the context for an individual
   API element.
 
   Fields:
@@ -466,7 +466,7 @@
 
 
 class Control(_messages.Message):
-  """Selects and configures the service controller used by the service.  The
+  r"""Selects and configures the service controller used by the service.  The
   service controller handles features like abuse, quota, billing, logging,
   monitoring, etc.
 
@@ -479,7 +479,7 @@
 
 
 class ConvertConfigRequest(_messages.Message):
-  """Request message for `ConvertConfig` method.
+  r"""Request message for `ConvertConfig` method.
 
   Messages:
     ConfigSpecValue: Input configuration For this version of API, the
@@ -497,7 +497,7 @@
 
   @encoding.MapUnrecognizedFields('additionalProperties')
   class ConfigSpecValue(_messages.Message):
-    """Input configuration For this version of API, the supported type is
+    r"""Input configuration For this version of API, the supported type is
     OpenApiSpec
 
     Messages:
@@ -509,7 +509,7 @@
     """
 
     class AdditionalProperty(_messages.Message):
-      """An additional property for a ConfigSpecValue object.
+      r"""An additional property for a ConfigSpecValue object.
 
       Fields:
         key: Name of the additional property.
@@ -528,7 +528,7 @@
 
 
 class ConvertConfigResponse(_messages.Message):
-  """Response message for `ConvertConfig` method.
+  r"""Response message for `ConvertConfig` method.
 
   Fields:
     diagnostics: Any errors or warnings that occured during config conversion.
@@ -541,7 +541,7 @@
 
 
 class CustomError(_messages.Message):
-  """Customize service error responses.  For example, list any service
+  r"""Customize service error responses.  For example, list any service
   specific protobuf types that can appear in error detail lists of error
   responses.  Example:      custom_error:       types:       -
   google.foo.v1.CustomError       - google.foo.v1.AnotherError
@@ -558,7 +558,7 @@
 
 
 class CustomErrorRule(_messages.Message):
-  """A custom error rule.
+  r"""A custom error rule.
 
   Fields:
     isErrorType: Mark this message as possible payload in error response.
@@ -573,7 +573,7 @@
 
 
 class CustomHttpPattern(_messages.Message):
-  """A custom pattern is used for defining custom HTTP verb.
+  r"""A custom pattern is used for defining custom HTTP verb.
 
   Fields:
     kind: The name of this custom HTTP verb.
@@ -585,7 +585,7 @@
 
 
 class CustomerSettings(_messages.Message):
-  """Settings that control how a customer (identified by a billing account)
+  r"""Settings that control how a customer (identified by a billing account)
   uses a service
 
   Fields:
@@ -606,7 +606,7 @@
 
 
 class Diagnostic(_messages.Message):
-  """A collection that represents a diagnostic message (error or warning)
+  r"""A collection that represents a diagnostic message (error or warning)
 
   Enums:
     KindValueValuesEnum: The kind of diagnostic information provided.
@@ -618,7 +618,7 @@
   """
 
   class KindValueValuesEnum(_messages.Enum):
-    """The kind of diagnostic information provided.
+    r"""The kind of diagnostic information provided.
 
     Values:
       WARNING: Warnings and errors
@@ -633,7 +633,7 @@
 
 
 class DisableServiceRequest(_messages.Message):
-  """Request message for DisableService method.
+  r"""Request message for DisableService method.
 
   Fields:
     consumerId: The identity of consumer resource which service disablement
@@ -647,7 +647,7 @@
 
 
 class Documentation(_messages.Message):
-  """`Documentation` provides the information for describing a service.
+  r"""`Documentation` provides the information for describing a service.
   Example: <pre><code>documentation:   summary: >     The Google Calendar API
   gives access     to most calendar features.   pages:   - name: Overview
   content: &#40;== include google/foo/overview.md ==&#41;   - name: Tutorial
@@ -705,7 +705,7 @@
 
 
 class DocumentationRule(_messages.Message):
-  """A documentation rule provides information about individual API elements.
+  r"""A documentation rule provides information about individual API elements.
 
   Fields:
     deprecationDescription: Deprecation description of the selected
@@ -725,7 +725,7 @@
 
 
 class EffectiveQuotaGroup(_messages.Message):
-  """An effective quota group contains both the metadata for a quota group as
+  r"""An effective quota group contains both the metadata for a quota group as
   derived from the service config, and the effective limits in that group as
   calculated from producer and consumer overrides together with service
   defaults.
@@ -742,7 +742,7 @@
   """
 
   class BillingInteractionValueValuesEnum(_messages.Enum):
-    """BillingInteractionValueValuesEnum enum type.
+    r"""BillingInteractionValueValuesEnum enum type.
 
     Values:
       BILLING_INTERACTION_UNSPECIFIED: The interaction between this quota
@@ -765,7 +765,7 @@
 
 
 class EffectiveQuotaLimit(_messages.Message):
-  """An effective quota limit contains the metadata for a quota limit as
+  r"""An effective quota limit contains the metadata for a quota limit as
   derived from the service config, together with fields that describe the
   effective limit value and what overrides can be applied to it.
 
@@ -787,7 +787,7 @@
 
 
 class EnableServiceRequest(_messages.Message):
-  """Request message for EnableService method.
+  r"""Request message for EnableService method.
 
   Fields:
     consumerId: The identity of consumer resource which service enablement
@@ -801,7 +801,7 @@
 
 
 class Enum(_messages.Message):
-  """Enum type definition.
+  r"""Enum type definition.
 
   Enums:
     SyntaxValueValuesEnum: The source syntax.
@@ -815,7 +815,7 @@
   """
 
   class SyntaxValueValuesEnum(_messages.Enum):
-    """The source syntax.
+    r"""The source syntax.
 
     Values:
       SYNTAX_PROTO2: Syntax `proto2`.
@@ -832,7 +832,7 @@
 
 
 class EnumValue(_messages.Message):
-  """Enum value definition.
+  r"""Enum value definition.
 
   Fields:
     name: Enum value name.
@@ -846,7 +846,7 @@
 
 
 class Field(_messages.Message):
-  """A single field of a message type.
+  r"""A single field of a message type.
 
   Enums:
     CardinalityValueValuesEnum: The field cardinality.
@@ -871,7 +871,7 @@
   """
 
   class CardinalityValueValuesEnum(_messages.Enum):
-    """The field cardinality.
+    r"""The field cardinality.
 
     Values:
       CARDINALITY_UNKNOWN: For fields with unknown cardinality.
@@ -885,7 +885,7 @@
     CARDINALITY_REPEATED = 3
 
   class KindValueValuesEnum(_messages.Enum):
-    """The field type.
+    r"""The field type.
 
     Values:
       TYPE_UNKNOWN: Field type unknown.
@@ -941,7 +941,7 @@
 
 
 class File(_messages.Message):
-  """A single swagger specification file.
+  r"""A single swagger specification file.
 
   Fields:
     contents: The contents of the swagger spec file.
@@ -953,7 +953,7 @@
 
 
 class Http(_messages.Message):
-  """Defines the HTTP configuration for a service. It contains a list of
+  r"""Defines the HTTP configuration for a service. It contains a list of
   HttpRule, each specifying the mapping of an RPC method to one or more HTTP
   REST API methods.
 
@@ -965,7 +965,7 @@
 
 
 class HttpRule(_messages.Message):
-  """`HttpRule` defines the mapping of an RPC method to one or more HTTP REST
+  r"""`HttpRule` defines the mapping of an RPC method to one or more HTTP REST
   APIs.  The mapping determines what portions of the request message are
   populated from the path, query parameters, or body of the HTTP request.  The
   mapping is typically specified as an `google.api.http` annotation, see
@@ -1094,7 +1094,7 @@
 
 
 class LabelDescriptor(_messages.Message):
-  """A description of a label.
+  r"""A description of a label.
 
   Enums:
     ValueTypeValueValuesEnum: The type of data that can be assigned to the
@@ -1107,7 +1107,7 @@
   """
 
   class ValueTypeValueValuesEnum(_messages.Enum):
-    """The type of data that can be assigned to the label.
+    r"""The type of data that can be assigned to the label.
 
     Values:
       STRING: A variable-length string. This is the default.
@@ -1124,7 +1124,7 @@
 
 
 class ListServiceConfigsResponse(_messages.Message):
-  """Response message for ListServiceConfigs method.
+  r"""Response message for ListServiceConfigs method.
 
   Fields:
     nextPageToken: The token of the next page of results.
@@ -1136,7 +1136,7 @@
 
 
 class ListServicesResponse(_messages.Message):
-  """Response message for `ListServices` method.
+  r"""Response message for `ListServices` method.
 
   Fields:
     nextPageToken: Token that can be passed to `ListServices` to resume a
@@ -1149,7 +1149,7 @@
 
 
 class LogDescriptor(_messages.Message):
-  """A description of a log type. Example in YAML format:      - name:
+  r"""A description of a log type. Example in YAML format:      - name:
   library.googleapis.com/activity_history       description: The history of
   borrowing and returning library items.       display_name: Activity
   labels:       - key: /customer_id         description: Identifier of a
@@ -1176,8 +1176,8 @@
 
 
 class Logging(_messages.Message):
-  """Logging configuration of the service.  The following example shows how to
-  configure logs to be sent to the producer and consumer projects. In the
+  r"""Logging configuration of the service.  The following example shows how
+  to configure logs to be sent to the producer and consumer projects. In the
   example, the `library.googleapis.com/activity_history` log is sent to both
   the producer and consumer projects, whereas the
   `library.googleapis.com/purchase_history` log is only sent to the producer
@@ -1210,7 +1210,7 @@
 
 
 class LoggingDestination(_messages.Message):
-  """Configuration of a specific logging destination (the producer project or
+  r"""Configuration of a specific logging destination (the producer project or
   the consumer project).
 
   Fields:
@@ -1225,7 +1225,7 @@
 
 
 class ManagedService(_messages.Message):
-  """The full representation of an API Service that is managed by the
+  r"""The full representation of an API Service that is managed by the
   `ServiceManager` API.  Includes both the service configuration, as well as
   other control plane deployment related information.
 
@@ -1260,7 +1260,7 @@
 
 
 class MediaDownload(_messages.Message):
-  """Do not use this. For media support, add instead
+  r"""Do not use this. For media support, add instead
   [][google.bytestream.RestByteStream] as an API to your configuration.
 
   Fields:
@@ -1271,7 +1271,7 @@
 
 
 class MediaUpload(_messages.Message):
-  """Do not use this. For media support, add instead
+  r"""Do not use this. For media support, add instead
   [][google.bytestream.RestByteStream] as an API to your configuration.
 
   Fields:
@@ -1282,7 +1282,7 @@
 
 
 class Method(_messages.Message):
-  """Method represents a method of an api.
+  r"""Method represents a method of an api.
 
   Enums:
     SyntaxValueValuesEnum: The source syntax of this method.
@@ -1298,7 +1298,7 @@
   """
 
   class SyntaxValueValuesEnum(_messages.Enum):
-    """The source syntax of this method.
+    r"""The source syntax of this method.
 
     Values:
       SYNTAX_PROTO2: Syntax `proto2`.
@@ -1317,7 +1317,7 @@
 
 
 class MetricDescriptor(_messages.Message):
-  """Defines a metric type and its schema.
+  r"""Defines a metric type and its schema.
 
   Enums:
     MetricKindValueValuesEnum: Whether the metric records instantaneous
@@ -1382,7 +1382,7 @@
   """
 
   class MetricKindValueValuesEnum(_messages.Enum):
-    """Whether the metric records instantaneous values, changes to a value,
+    r"""Whether the metric records instantaneous values, changes to a value,
     etc.
 
     Values:
@@ -1398,7 +1398,7 @@
     CUMULATIVE = 3
 
   class ValueTypeValueValuesEnum(_messages.Enum):
-    """Whether the measurement is an integer, a floating-point number, etc.
+    r"""Whether the measurement is an integer, a floating-point number, etc.
 
     Values:
       VALUE_TYPE_UNSPECIFIED: Do not use this default value.
@@ -1430,7 +1430,7 @@
 
 
 class Mixin(_messages.Message):
-  """Declares an API to be included in this API. The including API must
+  r"""Declares an API to be included in this API. The including API must
   redeclare all the methods from the included API, but documentation and
   options are inherited as follows:  - If after comment and whitespace
   stripping, the documentation   string of the redeclared method is empty, it
@@ -1475,8 +1475,8 @@
 
 
 class MonitoredResourceDescriptor(_messages.Message):
-  """An object that describes the schema of a MonitoredResource object using a
-  type name and a set of labels.  For example, the monitored resource
+  r"""An object that describes the schema of a MonitoredResource object using
+  a type name and a set of labels.  For example, the monitored resource
   descriptor for Google Compute Engine VM instances has a type of
   `"gce_instance"` and specifies the use of the labels `"instance_id"` and
   `"zone"` to identify particular VM instances.  Different APIs can support
@@ -1512,7 +1512,7 @@
 
 
 class Monitoring(_messages.Message):
-  """Monitoring configuration of the service.  The example below shows how to
+  r"""Monitoring configuration of the service.  The example below shows how to
   configure monitored resources and metrics for monitoring. In the example, a
   monitored resource and two metrics are defined. The
   `library.googleapis.com/book/returned_count` metric is sent to both producer
@@ -1549,7 +1549,7 @@
 
 
 class MonitoringDestination(_messages.Message):
-  """Configuration of a specific monitoring destination (the producer project
+  r"""Configuration of a specific monitoring destination (the producer project
   or the consumer project).
 
   Fields:
@@ -1564,7 +1564,7 @@
 
 
 class OAuthRequirements(_messages.Message):
-  """OAuth scopes are a way to define data and permissions on data. For
+  r"""OAuth scopes are a way to define data and permissions on data. For
   example, there are scopes defined for "Read-only access to Google Calendar"
   and "Access to Cloud Platform". Users can consent to a scope for an
   application, giving it permission to access that data on their behalf.
@@ -1591,7 +1591,7 @@
 
 
 class OpenApiSpec(_messages.Message):
-  """A collection of OpenAPI specification files.
+  r"""A collection of OpenAPI specification files.
 
   Fields:
     openApiFiles: Individual files.
@@ -1601,8 +1601,8 @@
 
 
 class Operation(_messages.Message):
-  """This resource represents a long-running operation that is the result of a
-  network API call.
+  r"""This resource represents a long-running operation that is the result of
+  a network API call.
 
   Messages:
     MetadataValue: Service-specific metadata associated with the operation.
@@ -1642,7 +1642,7 @@
 
   @encoding.MapUnrecognizedFields('additionalProperties')
   class MetadataValue(_messages.Message):
-    """Service-specific metadata associated with the operation.  It typically
+    r"""Service-specific metadata associated with the operation.  It typically
     contains progress information and common metadata such as create time.
     Some services might not provide such metadata.  Any method that returns a
     long-running operation should document the metadata type, if any.
@@ -1656,7 +1656,7 @@
     """
 
     class AdditionalProperty(_messages.Message):
-      """An additional property for a MetadataValue object.
+      r"""An additional property for a MetadataValue object.
 
       Fields:
         key: Name of the additional property.
@@ -1670,7 +1670,7 @@
 
   @encoding.MapUnrecognizedFields('additionalProperties')
   class ResponseValue(_messages.Message):
-    """The normal response of the operation in case of success.  If the
+    r"""The normal response of the operation in case of success.  If the
     original method returns no data on success, such as `Delete`, the response
     is `google.protobuf.Empty`.  If the original method is standard
     `Get`/`Create`/`Update`, the response should be the resource.  For other
@@ -1687,7 +1687,7 @@
     """
 
     class AdditionalProperty(_messages.Message):
-      """An additional property for a ResponseValue object.
+      r"""An additional property for a ResponseValue object.
 
       Fields:
         key: Name of the additional property.
@@ -1707,7 +1707,7 @@
 
 
 class OperationMetadata(_messages.Message):
-  """The metadata associated with a long running operation resource.
+  r"""The metadata associated with a long running operation resource.
 
   Fields:
     progressPercentage: Percentage of completion of this operation, ranging
@@ -1726,7 +1726,7 @@
 
 
 class Option(_messages.Message):
-  """A protocol buffer option, which can be attached to a message, field,
+  r"""A protocol buffer option, which can be attached to a message, field,
   enumeration, etc.
 
   Messages:
@@ -1739,7 +1739,7 @@
 
   @encoding.MapUnrecognizedFields('additionalProperties')
   class ValueValue(_messages.Message):
-    """The option's value. For example, `"com.google.protobuf"`.
+    r"""The option's value. For example, `"com.google.protobuf"`.
 
     Messages:
       AdditionalProperty: An additional property for a ValueValue object.
@@ -1750,7 +1750,7 @@
     """
 
     class AdditionalProperty(_messages.Message):
-      """An additional property for a ValueValue object.
+      r"""An additional property for a ValueValue object.
 
       Fields:
         key: Name of the additional property.
@@ -1767,8 +1767,8 @@
 
 
 class Page(_messages.Message):
-  """Represents a documentation page. A page can contain subpages to represent
-  nested documentation set structure.
+  r"""Represents a documentation page. A page can contain subpages to
+  represent nested documentation set structure.
 
   Fields:
     content: The Markdown content of the page. You can use <code>&#40;==
@@ -1791,7 +1791,7 @@
 
 
 class ProjectProperties(_messages.Message):
-  """A descriptor for defining project properties for a service. One service
+  r"""A descriptor for defining project properties for a service. One service
   may have many consumer projects, and the service may want to behave
   differently depending on some properties on the project. For example, a
   project may be associated with a school, or a business, or a government
@@ -1810,7 +1810,7 @@
 
 
 class ProjectSettings(_messages.Message):
-  """Settings that control how a consumer project uses a service.
+  r"""Settings that control how a consumer project uses a service.
 
   Messages:
     PropertiesValue: Service-defined per-consumer properties.  A key-value
@@ -1838,7 +1838,7 @@
 
   @encoding.MapUnrecognizedFields('additionalProperties')
   class PropertiesValue(_messages.Message):
-    """Service-defined per-consumer properties.  A key-value mapping a string
+    r"""Service-defined per-consumer properties.  A key-value mapping a string
     key to a google.protobuf.ListValue proto. Values in the list are typed as
     defined in the Service configuration's consumer.properties field.
 
@@ -1850,7 +1850,7 @@
     """
 
     class AdditionalProperty(_messages.Message):
-      """An additional property for a PropertiesValue object.
+      r"""An additional property for a PropertiesValue object.
 
       Fields:
         key: Name of the additional property.
@@ -1872,7 +1872,7 @@
 
 
 class Property(_messages.Message):
-  """Defines project properties.  API services can define properties that can
+  r"""Defines project properties.  API services can define properties that can
   be assigned to consumer projects so that backends can perform response
   customization without having to make additional calls or maintain additional
   storage. For example, Maps API defines properties that controls map tile
@@ -1890,7 +1890,7 @@
   """
 
   class TypeValueValuesEnum(_messages.Enum):
-    """The type of this property.
+    r"""The type of this property.
 
     Values:
       UNSPECIFIED: The type is unspecified, and will result in an error.
@@ -1911,8 +1911,9 @@
 
 
 class ProtoDescriptor(_messages.Message):
-  """Contains a serialized protoc-generated protocol buffer message descriptor
-  set along with a URL that describes the type of the descriptor message.
+  r"""Contains a serialized protoc-generated protocol buffer message
+  descriptor set along with a URL that describes the type of the descriptor
+  message.
 
   Fields:
     typeUrl: A URL/resource name whose content describes the type of the
@@ -1932,7 +1933,7 @@
 
 
 class ProtoSpec(_messages.Message):
-  """A collection of protocol buffer service specification files.
+  r"""A collection of protocol buffer service specification files.
 
   Fields:
     protoDescriptor: A complete descriptor of a protocol buffer specification
@@ -1942,7 +1943,7 @@
 
 
 class QueryUserAccessResponse(_messages.Message):
-  """Request message for QueryUserAccess method.
+  r"""Request message for QueryUserAccess method.
 
   Fields:
     accessibleVisibilityLabels: Any visibility labels on the service that are
@@ -1956,7 +1957,7 @@
 
 
 class Quota(_messages.Message):
-  """Quota configuration helps to achieve fairness and budgeting in service
+  r"""Quota configuration helps to achieve fairness and budgeting in service
   usage.  - Fairness is achieved through the use of short-term quota limits
   that are usually defined over a time window of several seconds or   minutes.
   When such a limit is applied, for example at the user   level, it ensures
@@ -2030,7 +2031,7 @@
 
 
 class QuotaGroup(_messages.Message):
-  """`QuotaGroup` defines a set of quota limits to enforce.
+  r"""`QuotaGroup` defines a set of quota limits to enforce.
 
   Fields:
     billable: Indicates if the quota limits defined in this quota group apply
@@ -2055,7 +2056,7 @@
 
 
 class QuotaGroupMapping(_messages.Message):
-  """A quota group mapping.
+  r"""A quota group mapping.
 
   Fields:
     cost: Number of tokens to consume for each request. This allows different
@@ -2070,7 +2071,7 @@
 
 
 class QuotaInfo(_messages.Message):
-  """Metadata about an individual quota, containing usage and limit
+  r"""Metadata about an individual quota, containing usage and limit
   information.
 
   Fields:
@@ -2088,7 +2089,7 @@
 
 
 class QuotaLimit(_messages.Message):
-  """`QuotaLimit` defines a specific limit that applies over a specified
+  r"""`QuotaLimit` defines a specific limit that applies over a specified
   duration for a limit type. There can be at most one limit for a duration and
   limit type combination defined within a `QuotaGroup`.
 
@@ -2152,7 +2153,7 @@
   """
 
   class LimitByValueValuesEnum(_messages.Enum):
-    """Limit type to use for enforcing this quota limit. Each unique value
+    r"""Limit type to use for enforcing this quota limit. Each unique value
     gets the defined number of tokens to consume from. For a quota limit that
     uses user type, each user making requests through the same client
     application project will get his/her own pool of tokens to consume,
@@ -2180,8 +2181,8 @@
 
 
 class QuotaLimitOverride(_messages.Message):
-  """Specifies a custom quota limit that is applied for this consumer project.
-  This overrides the default value in google.api.QuotaLimit.
+  r"""Specifies a custom quota limit that is applied for this consumer
+  project. This overrides the default value in google.api.QuotaLimit.
 
   Fields:
     limit: The new limit for this project. May be -1 (unlimited), 0 (block),
@@ -2196,7 +2197,7 @@
 
 
 class QuotaRule(_messages.Message):
-  """`QuotaRule` maps a method to a set of `QuotaGroup`s.
+  r"""`QuotaRule` maps a method to a set of `QuotaGroup`s.
 
   Fields:
     disableQuota: Indicates if quota checking should be enforced. Quota will
@@ -2215,8 +2216,9 @@
 
 
 class QuotaSettings(_messages.Message):
-  """Per-consumer overrides for quota settings. See google/api/quota.proto for
-  the corresponding service configuration which provides the default values.
+  r"""Per-consumer overrides for quota settings. See google/api/quota.proto
+  for the corresponding service configuration which provides the default
+  values.
 
   Messages:
     ConsumerOverridesValue: Quota overrides set by the consumer. Consumer
@@ -2282,7 +2284,7 @@
 
   @encoding.MapUnrecognizedFields('additionalProperties')
   class ConsumerOverridesValue(_messages.Message):
-    """Quota overrides set by the consumer. Consumer overrides will only have
+    r"""Quota overrides set by the consumer. Consumer overrides will only have
     an effect up to the max_limit specified in the service config, or the the
     producer override, if one exists.  The key for this map is one of the
     following:  - '<GROUP_NAME>/<LIMIT_NAME>' for quotas defined within quota
@@ -2303,7 +2305,7 @@
     """
 
     class AdditionalProperty(_messages.Message):
-      """An additional property for a ConsumerOverridesValue object.
+      r"""An additional property for a ConsumerOverridesValue object.
 
       Fields:
         key: Name of the additional property.
@@ -2317,7 +2319,7 @@
 
   @encoding.MapUnrecognizedFields('additionalProperties')
   class EffectiveQuotasValue(_messages.Message):
-    """The effective quota limits for each group, derived from the service
+    r"""The effective quota limits for each group, derived from the service
     defaults together with any producer or consumer overrides. For each limit,
     the effective value is the minimum of the producer and consumer overrides
     if either is present, or else the service default if neither is present.
@@ -2332,7 +2334,7 @@
     """
 
     class AdditionalProperty(_messages.Message):
-      """An additional property for a EffectiveQuotasValue object.
+      r"""An additional property for a EffectiveQuotasValue object.
 
       Fields:
         key: Name of the additional property.
@@ -2346,7 +2348,7 @@
 
   @encoding.MapUnrecognizedFields('additionalProperties')
   class ProducerOverridesValue(_messages.Message):
-    """Quota overrides set by the producer. Note that if a consumer override
+    r"""Quota overrides set by the producer. Note that if a consumer override
     is also specified, then the minimum of the two will be used. This allows
     consumers to cap their usage voluntarily.  The key for this map is one of
     the following:  - '<GROUP_NAME>/<LIMIT_NAME>' for quotas defined within
@@ -2367,7 +2369,7 @@
     """
 
     class AdditionalProperty(_messages.Message):
-      """An additional property for a ProducerOverridesValue object.
+      r"""An additional property for a ProducerOverridesValue object.
 
       Fields:
         key: Name of the additional property.
@@ -2387,7 +2389,7 @@
 
 
 class QuotaUsage(_messages.Message):
-  """Specifies the used quota amount for a quota limit at a particular time.
+  r"""Specifies the used quota amount for a quota limit at a particular time.
 
   Fields:
     endTime: The time the quota duration ended.
@@ -2403,7 +2405,7 @@
 
 
 class Service(_messages.Message):
-  """`Service` is the root object of the configuration schema. It describes
+  r"""`Service` is the root object of the configuration schema. It describes
   basic information like the name of the service and the exposed API
   interfaces, and delegates other aspects to configuration sub-sections.
   Example:      type: google.api.Service     config_version: 1     name:
@@ -2498,7 +2500,7 @@
 
 
 class ServiceAccessList(_messages.Message):
-  """List of users and groups that are granted access to a service or
+  r"""List of users and groups that are granted access to a service or
   visibility label.
 
   Fields:
@@ -2515,7 +2517,7 @@
 
 
 class ServiceAccessPolicy(_messages.Message):
-  """Policy describing who can access a service and any visibility labels on
+  r"""Policy describing who can access a service and any visibility labels on
   that service.
 
   Messages:
@@ -2535,9 +2537,9 @@
 
   @encoding.MapUnrecognizedFields('additionalProperties')
   class VisibilityLabelAccessListsValue(_messages.Message):
-    """ACLs for access to restricted parts of the service.  The map key is the
-    visibility label that is being controlled.  Note that access to any label
-    also implies access to the unrestricted surface.
+    r"""ACLs for access to restricted parts of the service.  The map key is
+    the visibility label that is being controlled.  Note that access to any
+    label also implies access to the unrestricted surface.
 
     Messages:
       AdditionalProperty: An additional property for a
@@ -2549,7 +2551,7 @@
     """
 
     class AdditionalProperty(_messages.Message):
-      """An additional property for a VisibilityLabelAccessListsValue object.
+      r"""An additional property for a VisibilityLabelAccessListsValue object.
 
       Fields:
         key: Name of the additional property.
@@ -2567,7 +2569,7 @@
 
 
 class ServicemanagementOperationsGetRequest(_messages.Message):
-  """A ServicemanagementOperationsGetRequest object.
+  r"""A ServicemanagementOperationsGetRequest object.
 
   Fields:
     operationsId: Part of `name`. The name of the operation resource.
@@ -2577,7 +2579,7 @@
 
 
 class ServicemanagementServicesAccessPolicyQueryRequest(_messages.Message):
-  """A ServicemanagementServicesAccessPolicyQueryRequest object.
+  r"""A ServicemanagementServicesAccessPolicyQueryRequest object.
 
   Fields:
     serviceName: The service to query access for.
@@ -2589,7 +2591,7 @@
 
 
 class ServicemanagementServicesConfigsCreateRequest(_messages.Message):
-  """A ServicemanagementServicesConfigsCreateRequest object.
+  r"""A ServicemanagementServicesConfigsCreateRequest object.
 
   Fields:
     service: A Service resource to be passed as the request body.
@@ -2602,7 +2604,7 @@
 
 
 class ServicemanagementServicesConfigsGetRequest(_messages.Message):
-  """A ServicemanagementServicesConfigsGetRequest object.
+  r"""A ServicemanagementServicesConfigsGetRequest object.
 
   Fields:
     configId: The id of the service config resource. Optional. If it is not
@@ -2616,7 +2618,7 @@
 
 
 class ServicemanagementServicesConfigsListRequest(_messages.Message):
-  """A ServicemanagementServicesConfigsListRequest object.
+  r"""A ServicemanagementServicesConfigsListRequest object.
 
   Fields:
     pageSize: The max number of items to include in the response list.
@@ -2631,7 +2633,7 @@
 
 
 class ServicemanagementServicesConfigsSubmitRequest(_messages.Message):
-  """A ServicemanagementServicesConfigsSubmitRequest object.
+  r"""A ServicemanagementServicesConfigsSubmitRequest object.
 
   Fields:
     serviceName: The name of the service.  See the `ServiceManager` overview
@@ -2645,7 +2647,7 @@
 
 
 class ServicemanagementServicesCustomerSettingsGetRequest(_messages.Message):
-  """A ServicemanagementServicesCustomerSettingsGetRequest object.
+  r"""A ServicemanagementServicesCustomerSettingsGetRequest object.
 
   Enums:
     ViewValueValuesEnum: Request only fields for the specified view.
@@ -2662,7 +2664,7 @@
   """
 
   class ViewValueValuesEnum(_messages.Enum):
-    """Request only fields for the specified view.
+    r"""Request only fields for the specified view.
 
     Values:
       PROJECT_SETTINGS_VIEW_UNSPECIFIED: <no description>
@@ -2682,7 +2684,7 @@
 
 
 class ServicemanagementServicesCustomerSettingsPatchRequest(_messages.Message):
-  """A ServicemanagementServicesCustomerSettingsPatchRequest object.
+  r"""A ServicemanagementServicesCustomerSettingsPatchRequest object.
 
   Fields:
     customerId: ID for the customer. See the comment for
@@ -2703,7 +2705,7 @@
 
 
 class ServicemanagementServicesDeleteRequest(_messages.Message):
-  """A ServicemanagementServicesDeleteRequest object.
+  r"""A ServicemanagementServicesDeleteRequest object.
 
   Fields:
     serviceName: The name of the service.  See the `ServiceManager` overview
@@ -2714,7 +2716,7 @@
 
 
 class ServicemanagementServicesDisableRequest(_messages.Message):
-  """A ServicemanagementServicesDisableRequest object.
+  r"""A ServicemanagementServicesDisableRequest object.
 
   Fields:
     disableServiceRequest: A DisableServiceRequest resource to be passed as
@@ -2728,7 +2730,7 @@
 
 
 class ServicemanagementServicesEnableRequest(_messages.Message):
-  """A ServicemanagementServicesEnableRequest object.
+  r"""A ServicemanagementServicesEnableRequest object.
 
   Fields:
     enableServiceRequest: A EnableServiceRequest resource to be passed as the
@@ -2742,7 +2744,7 @@
 
 
 class ServicemanagementServicesGetAccessPolicyRequest(_messages.Message):
-  """A ServicemanagementServicesGetAccessPolicyRequest object.
+  r"""A ServicemanagementServicesGetAccessPolicyRequest object.
 
   Fields:
     serviceName: The name of the service.  For example:
@@ -2753,7 +2755,7 @@
 
 
 class ServicemanagementServicesGetConfigRequest(_messages.Message):
-  """A ServicemanagementServicesGetConfigRequest object.
+  r"""A ServicemanagementServicesGetConfigRequest object.
 
   Fields:
     configId: The id of the service config resource. Optional. If it is not
@@ -2767,7 +2769,7 @@
 
 
 class ServicemanagementServicesGetRequest(_messages.Message):
-  """A ServicemanagementServicesGetRequest object.
+  r"""A ServicemanagementServicesGetRequest object.
 
   Enums:
     ViewValueValuesEnum: If project_settings is expanded, request only fields
@@ -2788,7 +2790,7 @@
   """
 
   class ViewValueValuesEnum(_messages.Enum):
-    """If project_settings is expanded, request only fields for the specified
+    r"""If project_settings is expanded, request only fields for the specified
     view.
 
     Values:
@@ -2809,7 +2811,7 @@
 
 
 class ServicemanagementServicesListRequest(_messages.Message):
-  """A ServicemanagementServicesListRequest object.
+  r"""A ServicemanagementServicesListRequest object.
 
   Fields:
     category: Include services only in the specified category. Supported
@@ -2837,7 +2839,7 @@
 
 
 class ServicemanagementServicesPatchConfigRequest(_messages.Message):
-  """A ServicemanagementServicesPatchConfigRequest object.
+  r"""A ServicemanagementServicesPatchConfigRequest object.
 
   Fields:
     service: A Service resource to be passed as the request body.
@@ -2852,7 +2854,7 @@
 
 
 class ServicemanagementServicesPatchRequest(_messages.Message):
-  """A ServicemanagementServicesPatchRequest object.
+  r"""A ServicemanagementServicesPatchRequest object.
 
   Fields:
     managedService: A ManagedService resource to be passed as the request
@@ -2868,7 +2870,7 @@
 
 
 class ServicemanagementServicesProjectSettingsGetRequest(_messages.Message):
-  """A ServicemanagementServicesProjectSettingsGetRequest object.
+  r"""A ServicemanagementServicesProjectSettingsGetRequest object.
 
   Enums:
     ViewValueValuesEnum: Request only the fields for the specified view.
@@ -2883,7 +2885,7 @@
   """
 
   class ViewValueValuesEnum(_messages.Enum):
-    """Request only the fields for the specified view.
+    r"""Request only the fields for the specified view.
 
     Values:
       PROJECT_SETTINGS_VIEW_UNSPECIFIED: <no description>
@@ -2903,7 +2905,7 @@
 
 
 class ServicemanagementServicesProjectSettingsPatchRequest(_messages.Message):
-  """A ServicemanagementServicesProjectSettingsPatchRequest object.
+  r"""A ServicemanagementServicesProjectSettingsPatchRequest object.
 
   Fields:
     consumerProjectId: The project ID of the consumer.
@@ -2921,7 +2923,7 @@
 
 
 class ServicemanagementServicesUpdateConfigRequest(_messages.Message):
-  """A ServicemanagementServicesUpdateConfigRequest object.
+  r"""A ServicemanagementServicesUpdateConfigRequest object.
 
   Fields:
     service: A Service resource to be passed as the request body.
@@ -2938,7 +2940,7 @@
 
 
 class ServicemanagementServicesUpdateRequest(_messages.Message):
-  """A ServicemanagementServicesUpdateRequest object.
+  r"""A ServicemanagementServicesUpdateRequest object.
 
   Fields:
     managedService: A ManagedService resource to be passed as the request
@@ -2956,7 +2958,7 @@
 
 
 class SourceContext(_messages.Message):
-  """`SourceContext` represents information about the source of a protobuf
+  r"""`SourceContext` represents information about the source of a protobuf
   element, like the file in which it is defined.
 
   Fields:
@@ -2969,7 +2971,7 @@
 
 
 class StandardQueryParameters(_messages.Message):
-  """Query parameters accepted by all methods.
+  r"""Query parameters accepted by all methods.
 
   Enums:
     FXgafvValueValuesEnum: V1 error format.
@@ -2998,7 +3000,7 @@
   """
 
   class AltValueValuesEnum(_messages.Enum):
-    """Data format for response.
+    r"""Data format for response.
 
     Values:
       json: Responses with Content-Type of application/json
@@ -3010,7 +3012,7 @@
     proto = 2
 
   class FXgafvValueValuesEnum(_messages.Enum):
-    """V1 error format.
+    r"""V1 error format.
 
     Values:
       _1: v1 error format
@@ -3036,7 +3038,7 @@
 
 
 class Status(_messages.Message):
-  """The `Status` type defines a logical error model that is suitable for
+  r"""The `Status` type defines a logical error model that is suitable for
   different programming environments, including REST APIs and RPC APIs. It is
   used by [gRPC](https://github.com/grpc). The error model is designed to be:
   - Simple to use and understand for most users - Flexible enough to meet
@@ -3084,7 +3086,7 @@
 
   @encoding.MapUnrecognizedFields('additionalProperties')
   class DetailsValueListEntry(_messages.Message):
-    """A DetailsValueListEntry object.
+    r"""A DetailsValueListEntry object.
 
     Messages:
       AdditionalProperty: An additional property for a DetailsValueListEntry
@@ -3096,7 +3098,7 @@
     """
 
     class AdditionalProperty(_messages.Message):
-      """An additional property for a DetailsValueListEntry object.
+      r"""An additional property for a DetailsValueListEntry object.
 
       Fields:
         key: Name of the additional property.
@@ -3114,7 +3116,7 @@
 
 
 class Step(_messages.Message):
-  """Represents the status of one operation step.
+  r"""Represents the status of one operation step.
 
   Enums:
     StatusValueValuesEnum: The status code.
@@ -3125,7 +3127,7 @@
   """
 
   class StatusValueValuesEnum(_messages.Enum):
-    """The status code.
+    r"""The status code.
 
     Values:
       STATUS_UNSPECIFIED: Unspecifed code.
@@ -3145,7 +3147,7 @@
 
 
 class SubmitConfigSourceRequest(_messages.Message):
-  """Request message for SubmitConfigSource method.
+  r"""Request message for SubmitConfigSource method.
 
   Fields:
     configSource: The source configuration for the service.
@@ -3159,7 +3161,7 @@
 
 
 class SwaggerSpec(_messages.Message):
-  """A collection of swagger specification files.
+  r"""A collection of swagger specification files.
 
   Fields:
     swaggerFiles: The individual files.
@@ -3169,7 +3171,7 @@
 
 
 class SystemParameter(_messages.Message):
-  """Define a parameter's name and location. The parameter may be passed as
+  r"""Define a parameter's name and location. The parameter may be passed as
   either an HTTP header or a URL query parameter, and if both are passed the
   behavior is implementation-dependent.
 
@@ -3188,7 +3190,7 @@
 
 
 class SystemParameterRule(_messages.Message):
-  """Define a system parameter rule mapping system parameter definitions to
+  r"""Define a system parameter rule mapping system parameter definitions to
   methods.
 
   Fields:
@@ -3206,7 +3208,7 @@
 
 
 class SystemParameters(_messages.Message):
-  """### System parameter configuration  A system parameter is a special kind
+  r"""### System parameter configuration  A system parameter is a special kind
   of parameter defined by the API system, not by an individual API. It is
   typically mapped to an HTTP header and/or a URL query parameter. This
   configuration specifies which methods change the names of the system
@@ -3230,7 +3232,7 @@
 
 
 class Type(_messages.Message):
-  """A protocol buffer message type.
+  r"""A protocol buffer message type.
 
   Enums:
     SyntaxValueValuesEnum: The source syntax.
@@ -3245,7 +3247,7 @@
   """
 
   class SyntaxValueValuesEnum(_messages.Enum):
-    """The source syntax.
+    r"""The source syntax.
 
     Values:
       SYNTAX_PROTO2: Syntax `proto2`.
@@ -3263,7 +3265,7 @@
 
 
 class Usage(_messages.Message):
-  """Configuration controlling usage of a service.
+  r"""Configuration controlling usage of a service.
 
   Enums:
     ServiceAccessValueValuesEnum: Controls which users can see or activate the
@@ -3294,7 +3296,7 @@
   """
 
   class ServiceAccessValueValuesEnum(_messages.Enum):
-    """Controls which users can see or activate the service.
+    r"""Controls which users can see or activate the service.
 
     Values:
       RESTRICTED: The service can only be seen/used by users identified in the
@@ -3329,7 +3331,7 @@
 
 
 class UsageRule(_messages.Message):
-  """Usage configuration rules for the service.  NOTE: Under development.
+  r"""Usage configuration rules for the service.  NOTE: Under development.
   Use this rule to configure unregistered calls for the service. Unregistered
   calls are calls that do not contain consumer project identity. (Example:
   calls that do not contain an API key). By default, API methods do not allow
@@ -3354,7 +3356,7 @@
 
 
 class UsageSettings(_messages.Message):
-  """Usage settings for a consumer of a service.
+  r"""Usage settings for a consumer of a service.
 
   Enums:
     ConsumerEnableStatusValueValuesEnum: Consumer controlled setting to
@@ -3368,7 +3370,7 @@
   """
 
   class ConsumerEnableStatusValueValuesEnum(_messages.Enum):
-    """Consumer controlled setting to enable/disable use of this service by
+    r"""Consumer controlled setting to enable/disable use of this service by
     the consumer project. The default value of this is controlled by the
     service configuration.
 
@@ -3383,7 +3385,7 @@
 
 
 class VariableTermQuota(_messages.Message):
-  """A variable term quota is a bucket of tokens that is consumed over a
+  r"""A variable term quota is a bucket of tokens that is consumed over a
   specified (usually long) time period. When present, it overrides any "1d"
   duration per-project quota specified on the group.  Variable terms run from
   midnight to midnight, start_date to end_date (inclusive) in the
@@ -3418,17 +3420,18 @@
 
 
 class Visibility(_messages.Message):
-  """`Visibility` defines restrictions for the visibility of service elements.
-  Restrictions are specified using visibility labels (e.g., TRUSTED_TESTER)
-  that are elsewhere linked to users and projects.  Users and projects can
-  have access to more than one visibility label. The effective visibility for
-  multiple labels is the union of each label's elements, plus any unrestricted
-  elements.  If an element and its parents have no restrictions, visibility is
-  unconditionally granted.  Example:      visibility:       rules:       -
-  selector: google.calendar.Calendar.EnhancedSearch         restriction:
-  TRUSTED_TESTER       - selector: google.calendar.Calendar.Delegate
-  restriction: GOOGLE_INTERNAL  Here, all methods are publicly visible except
-  for the restricted methods EnhancedSearch and Delegate.
+  r"""`Visibility` defines restrictions for the visibility of service
+  elements.  Restrictions are specified using visibility labels (e.g.,
+  TRUSTED_TESTER) that are elsewhere linked to users and projects.  Users and
+  projects can have access to more than one visibility label. The effective
+  visibility for multiple labels is the union of each label's elements, plus
+  any unrestricted elements.  If an element and its parents have no
+  restrictions, visibility is unconditionally granted.  Example:
+  visibility:       rules:       - selector:
+  google.calendar.Calendar.EnhancedSearch         restriction: TRUSTED_TESTER
+  - selector: google.calendar.Calendar.Delegate         restriction:
+  GOOGLE_INTERNAL  Here, all methods are publicly visible except for the
+  restricted methods EnhancedSearch and Delegate.
 
   Fields:
     enforceRuntimeVisibility: Controls whether visibility rules are enforced
@@ -3448,8 +3451,8 @@
 
 
 class VisibilityRule(_messages.Message):
-  """A visibility rule provides visibility configuration for an individual API
-  element.
+  r"""A visibility rule provides visibility configuration for an individual
+  API element.
 
   Fields:
     enforceRuntimeVisibility: Controls whether visibility is enforced at
@@ -3477,7 +3480,7 @@
 
 
 class VisibilitySettings(_messages.Message):
-  """Settings that control which features of the service are visible to the
+  r"""Settings that control which features of the service are visible to the
   consumer project.
 
   Fields:
@@ -3495,11 +3498,8 @@
 
 
 encoding.AddCustomJsonFieldMapping(
-    StandardQueryParameters, 'f__xgafv', '$.xgafv',
-    package=u'servicemanagement')
+    StandardQueryParameters, 'f__xgafv', '$.xgafv')
 encoding.AddCustomJsonEnumMapping(
-    StandardQueryParameters.FXgafvValueValuesEnum, '_1', '1',
-    package=u'servicemanagement')
+    StandardQueryParameters.FXgafvValueValuesEnum, '_1', '1')
 encoding.AddCustomJsonEnumMapping(
-    StandardQueryParameters.FXgafvValueValuesEnum, '_2', '2',
-    package=u'servicemanagement')
+    StandardQueryParameters.FXgafvValueValuesEnum, '_2', '2')
diff --git a/samples/storage_sample/downloads_test.py b/samples/storage_sample/downloads_test.py
index a51cd95..9f4339e 100644
--- a/samples/storage_sample/downloads_test.py
+++ b/samples/storage_sample/downloads_test.py
@@ -25,7 +25,7 @@
 
 import six
 
-import apitools.base.py as apitools_base
+from apitools.base.py import exceptions
 import storage
 
 _CLIENT = None
@@ -81,7 +81,7 @@
 
     def testObjectDoesNotExist(self):
         self.__ResetDownload(auto_transfer=True)
-        with self.assertRaises(apitools_base.HttpError):
+        with self.assertRaises(exceptions.HttpError):
             self.__GetFile(self.__GetRequest('nonexistent_file'))
 
     def testAutoTransfer(self):
@@ -171,7 +171,7 @@
     def testSerializedDownload(self):
 
         def _ProgressCallback(unused_response, download_object):
-            print 'Progress %s' % download_object.progress
+            print('Progress %s' % download_object.progress)
 
         file_contents = self.__GetTestdataFileContents('fifteen_byte_file')
         object_name = os.path.join(self._TESTDATA_PREFIX, 'fifteen_byte_file')
diff --git a/samples/storage_sample/storage_v1.json b/samples/storage_sample/storage_v1.json
index ff0b909..2636bda 100644
--- a/samples/storage_sample/storage_v1.json
+++ b/samples/storage_sample/storage_v1.json
@@ -22,7 +22,7 @@
  "basePath": "/storage/v1/",
  "rootUrl": "https://www.googleapis.com/",
  "servicePath": "storage/v1/",
- "batchPath": "batch",
+ "batchPath": "batch/storage/v1",
  "parameters": {
   "alt": {
    "type": "string",
diff --git a/samples/storage_sample/storage_v1/storage_v1.py b/samples/storage_sample/storage_v1/storage_v1.py
deleted file mode 100644
index d7cff48..0000000
--- a/samples/storage_sample/storage_v1/storage_v1.py
+++ /dev/null
@@ -1,3578 +0,0 @@
-#!/usr/bin/env python
-"""CLI for storage, version v1."""
-# NOTE: This file is autogenerated and should not be edited by hand.
-
-import code
-import os
-import platform
-import sys
-
-from apitools.base.protorpclite import message_types
-from apitools.base.protorpclite import messages
-
-from google.apputils import appcommands
-import gflags as flags
-
-import apitools.base.py as apitools_base
-from apitools.base.py import cli as apitools_base_cli
-import storage_v1_client as client_lib
-import storage_v1_messages as messages
-
-
-def _DeclareStorageFlags():
-  """Declare global flags in an idempotent way."""
-  if 'api_endpoint' in flags.FLAGS:
-    return
-  flags.DEFINE_string(
-      'api_endpoint',
-      u'https://www.googleapis.com/storage/v1/',
-      'URL of the API endpoint to use.',
-      short_name='storage_url')
-  flags.DEFINE_string(
-      'history_file',
-      u'~/.storage.v1.history',
-      'File with interactive shell history.')
-  flags.DEFINE_multistring(
-      'add_header', [],
-      'Additional http headers (as key=value strings). '
-      'Can be specified multiple times.')
-  flags.DEFINE_string(
-      'service_account_json_keyfile', '',
-      'Filename for a JSON service account key downloaded'
-      ' from the Developer Console.')
-  flags.DEFINE_enum(
-      'alt',
-      u'json',
-      [u'json'],
-      u'Data format for the response.')
-  flags.DEFINE_string(
-      'fields',
-      None,
-      u'Selector specifying which fields to include in a partial response.')
-  flags.DEFINE_string(
-      'key',
-      None,
-      u'API key. Your API key identifies your project and provides you with '
-      u'API access, quota, and reports. Required unless you provide an OAuth '
-      u'2.0 token.')
-  flags.DEFINE_string(
-      'oauth_token',
-      None,
-      u'OAuth 2.0 token for the current user.')
-  flags.DEFINE_boolean(
-      'prettyPrint',
-      'True',
-      u'Returns response with indentations and line breaks.')
-  flags.DEFINE_string(
-      'quotaUser',
-      None,
-      u'Available to use for quota purposes for server-side applications. Can'
-      u' be any arbitrary string assigned to a user, but should not exceed 40'
-      u' characters. Overrides userIp if both are provided.')
-  flags.DEFINE_string(
-      'trace',
-      None,
-      'A tracing token of the form "token:<tokenid>" to include in api '
-      'requests.')
-  flags.DEFINE_string(
-      'userIp',
-      None,
-      u'IP address of the site where the request originates. Use this if you '
-      u'want to enforce per-user limits.')
-
-
-FLAGS = flags.FLAGS
-apitools_base_cli.DeclareBaseFlags()
-_DeclareStorageFlags()
-
-
-def GetGlobalParamsFromFlags():
-  """Return a StandardQueryParameters based on flags."""
-  result = messages.StandardQueryParameters()
-  if FLAGS['alt'].present:
-    result.alt = messages.StandardQueryParameters.AltValueValuesEnum(FLAGS.alt)
-  if FLAGS['fields'].present:
-    result.fields = FLAGS.fields.decode('utf8')
-  if FLAGS['key'].present:
-    result.key = FLAGS.key.decode('utf8')
-  if FLAGS['oauth_token'].present:
-    result.oauth_token = FLAGS.oauth_token.decode('utf8')
-  if FLAGS['prettyPrint'].present:
-    result.prettyPrint = FLAGS.prettyPrint
-  if FLAGS['quotaUser'].present:
-    result.quotaUser = FLAGS.quotaUser.decode('utf8')
-  if FLAGS['trace'].present:
-    result.trace = FLAGS.trace.decode('utf8')
-  if FLAGS['userIp'].present:
-    result.userIp = FLAGS.userIp.decode('utf8')
-  return result
-
-
-def GetClientFromFlags():
-  """Return a client object, configured from flags."""
-  log_request = FLAGS.log_request or FLAGS.log_request_response
-  log_response = FLAGS.log_response or FLAGS.log_request_response
-  api_endpoint = apitools_base.NormalizeApiEndpoint(FLAGS.api_endpoint)
-  additional_http_headers = dict(x.split('=', 1) for x in FLAGS.add_header)
-  credentials_args = {
-      'service_account_json_keyfile': os.path.expanduser(FLAGS.service_account_json_keyfile)
-  }
-  try:
-    client = client_lib.StorageV1(
-        api_endpoint, log_request=log_request,
-        log_response=log_response,
-        credentials_args=credentials_args,
-        additional_http_headers=additional_http_headers)
-  except apitools_base.CredentialsError as e:
-    print 'Error creating credentials: %s' % e
-    sys.exit(1)
-  return client
-
-
-class PyShell(appcommands.Cmd):
-
-  def Run(self, _):
-    """Run an interactive python shell with the client."""
-    client = GetClientFromFlags()
-    params = GetGlobalParamsFromFlags()
-    for field in params.all_fields():
-      value = params.get_assigned_value(field.name)
-      if value != field.default:
-        client.AddGlobalParam(field.name, value)
-    banner = """
-           == storage interactive console ==
-                 client: a storage client
-          apitools_base: base apitools module
-         messages: the generated messages module
-    """
-    local_vars = {
-        'apitools_base': apitools_base,
-        'client': client,
-        'client_lib': client_lib,
-        'messages': messages,
-    }
-    if platform.system() == 'Linux':
-      console = apitools_base_cli.ConsoleWithReadline(
-          local_vars, histfile=FLAGS.history_file)
-    else:
-      console = code.InteractiveConsole(local_vars)
-    try:
-      console.interact(banner)
-    except SystemExit as e:
-      return e.code
-
-
-class BucketAccessControlsDelete(apitools_base_cli.NewCmd):
-  """Command wrapping bucketAccessControls.Delete."""
-
-  usage = """bucketAccessControls_delete <bucket> <entity>"""
-
-  def __init__(self, name, fv):
-    super(BucketAccessControlsDelete, self).__init__(name, fv)
-
-  def RunWithArgs(self, bucket, entity):
-    """Permanently deletes the ACL entry for the specified entity on the
-    specified bucket.
-
-    Args:
-      bucket: Name of a bucket.
-      entity: The entity holding the permission. Can be user-userId, user-
-        emailAddress, group-groupId, group-emailAddress, allUsers, or
-        allAuthenticatedUsers.
-    """
-    client = GetClientFromFlags()
-    global_params = GetGlobalParamsFromFlags()
-    request = messages.StorageBucketAccessControlsDeleteRequest(
-        bucket=bucket.decode('utf8'),
-        entity=entity.decode('utf8'),
-        )
-    result = client.bucketAccessControls.Delete(
-        request, global_params=global_params)
-    print apitools_base_cli.FormatOutput(result)
-
-
-class BucketAccessControlsGet(apitools_base_cli.NewCmd):
-  """Command wrapping bucketAccessControls.Get."""
-
-  usage = """bucketAccessControls_get <bucket> <entity>"""
-
-  def __init__(self, name, fv):
-    super(BucketAccessControlsGet, self).__init__(name, fv)
-
-  def RunWithArgs(self, bucket, entity):
-    """Returns the ACL entry for the specified entity on the specified bucket.
-
-    Args:
-      bucket: Name of a bucket.
-      entity: The entity holding the permission. Can be user-userId, user-
-        emailAddress, group-groupId, group-emailAddress, allUsers, or
-        allAuthenticatedUsers.
-    """
-    client = GetClientFromFlags()
-    global_params = GetGlobalParamsFromFlags()
-    request = messages.StorageBucketAccessControlsGetRequest(
-        bucket=bucket.decode('utf8'),
-        entity=entity.decode('utf8'),
-        )
-    result = client.bucketAccessControls.Get(
-        request, global_params=global_params)
-    print apitools_base_cli.FormatOutput(result)
-
-
-class BucketAccessControlsInsert(apitools_base_cli.NewCmd):
-  """Command wrapping bucketAccessControls.Insert."""
-
-  usage = """bucketAccessControls_insert <bucket>"""
-
-  def __init__(self, name, fv):
-    super(BucketAccessControlsInsert, self).__init__(name, fv)
-    flags.DEFINE_string(
-        'domain',
-        None,
-        u'The domain associated with the entity, if any.',
-        flag_values=fv)
-    flags.DEFINE_string(
-        'email',
-        None,
-        u'The email address associated with the entity, if any.',
-        flag_values=fv)
-    flags.DEFINE_string(
-        'entity',
-        None,
-        u'The entity holding the permission, in one of the following forms:  '
-        u'- user-userId  - user-email  - group-groupId  - group-email  - '
-        u'domain-domain  - project-team-projectId  - allUsers  - '
-        u'allAuthenticatedUsers Examples:  - The user liz@example.com would '
-        u'be user-liz@example.com.  - The group example@googlegroups.com '
-        u'would be group-example@googlegroups.com.  - To refer to all members'
-        u' of the Google Apps for Business domain example.com, the entity '
-        u'would be domain-example.com.',
-        flag_values=fv)
-    flags.DEFINE_string(
-        'entityId',
-        None,
-        u'The ID for the entity, if any.',
-        flag_values=fv)
-    flags.DEFINE_string(
-        'etag',
-        None,
-        u'HTTP 1.1 Entity tag for the access-control entry.',
-        flag_values=fv)
-    flags.DEFINE_string(
-        'id',
-        None,
-        u'The ID of the access-control entry.',
-        flag_values=fv)
-    flags.DEFINE_string(
-        'kind',
-        u'storage#bucketAccessControl',
-        u'The kind of item this is. For bucket access control entries, this '
-        u'is always storage#bucketAccessControl.',
-        flag_values=fv)
-    flags.DEFINE_string(
-        'projectTeam',
-        None,
-        u'The project team associated with the entity, if any.',
-        flag_values=fv)
-    flags.DEFINE_string(
-        'role',
-        None,
-        u'The access permission for the entity. Can be READER, WRITER, or '
-        u'OWNER.',
-        flag_values=fv)
-    flags.DEFINE_string(
-        'selfLink',
-        None,
-        u'The link to this access-control entry.',
-        flag_values=fv)
-
-  def RunWithArgs(self, bucket):
-    """Creates a new ACL entry on the specified bucket.
-
-    Args:
-      bucket: The name of the bucket.
-
-    Flags:
-      domain: The domain associated with the entity, if any.
-      email: The email address associated with the entity, if any.
-      entity: The entity holding the permission, in one of the following
-        forms:  - user-userId  - user-email  - group-groupId  - group-email  -
-        domain-domain  - project-team-projectId  - allUsers  -
-        allAuthenticatedUsers Examples:  - The user liz@example.com would be
-        user-liz@example.com.  - The group example@googlegroups.com would be
-        group-example@googlegroups.com.  - To refer to all members of the
-        Google Apps for Business domain example.com, the entity would be
-        domain-example.com.
-      entityId: The ID for the entity, if any.
-      etag: HTTP 1.1 Entity tag for the access-control entry.
-      id: The ID of the access-control entry.
-      kind: The kind of item this is. For bucket access control entries, this
-        is always storage#bucketAccessControl.
-      projectTeam: The project team associated with the entity, if any.
-      role: The access permission for the entity. Can be READER, WRITER, or
-        OWNER.
-      selfLink: The link to this access-control entry.
-    """
-    client = GetClientFromFlags()
-    global_params = GetGlobalParamsFromFlags()
-    request = messages.BucketAccessControl(
-        bucket=bucket.decode('utf8'),
-        )
-    if FLAGS['domain'].present:
-      request.domain = FLAGS.domain.decode('utf8')
-    if FLAGS['email'].present:
-      request.email = FLAGS.email.decode('utf8')
-    if FLAGS['entity'].present:
-      request.entity = FLAGS.entity.decode('utf8')
-    if FLAGS['entityId'].present:
-      request.entityId = FLAGS.entityId.decode('utf8')
-    if FLAGS['etag'].present:
-      request.etag = FLAGS.etag.decode('utf8')
-    if FLAGS['id'].present:
-      request.id = FLAGS.id.decode('utf8')
-    if FLAGS['kind'].present:
-      request.kind = FLAGS.kind.decode('utf8')
-    if FLAGS['projectTeam'].present:
-      request.projectTeam = apitools_base.JsonToMessage(messages.BucketAccessControl.ProjectTeamValue, FLAGS.projectTeam)
-    if FLAGS['role'].present:
-      request.role = FLAGS.role.decode('utf8')
-    if FLAGS['selfLink'].present:
-      request.selfLink = FLAGS.selfLink.decode('utf8')
-    result = client.bucketAccessControls.Insert(
-        request, global_params=global_params)
-    print apitools_base_cli.FormatOutput(result)
-
-
-class BucketAccessControlsList(apitools_base_cli.NewCmd):
-  """Command wrapping bucketAccessControls.List."""
-
-  usage = """bucketAccessControls_list <bucket>"""
-
-  def __init__(self, name, fv):
-    super(BucketAccessControlsList, self).__init__(name, fv)
-
-  def RunWithArgs(self, bucket):
-    """Retrieves ACL entries on the specified bucket.
-
-    Args:
-      bucket: Name of a bucket.
-    """
-    client = GetClientFromFlags()
-    global_params = GetGlobalParamsFromFlags()
-    request = messages.StorageBucketAccessControlsListRequest(
-        bucket=bucket.decode('utf8'),
-        )
-    result = client.bucketAccessControls.List(
-        request, global_params=global_params)
-    print apitools_base_cli.FormatOutput(result)
-
-
-class BucketAccessControlsPatch(apitools_base_cli.NewCmd):
-  """Command wrapping bucketAccessControls.Patch."""
-
-  usage = """bucketAccessControls_patch <bucket> <entity>"""
-
-  def __init__(self, name, fv):
-    super(BucketAccessControlsPatch, self).__init__(name, fv)
-    flags.DEFINE_string(
-        'domain',
-        None,
-        u'The domain associated with the entity, if any.',
-        flag_values=fv)
-    flags.DEFINE_string(
-        'email',
-        None,
-        u'The email address associated with the entity, if any.',
-        flag_values=fv)
-    flags.DEFINE_string(
-        'entityId',
-        None,
-        u'The ID for the entity, if any.',
-        flag_values=fv)
-    flags.DEFINE_string(
-        'etag',
-        None,
-        u'HTTP 1.1 Entity tag for the access-control entry.',
-        flag_values=fv)
-    flags.DEFINE_string(
-        'id',
-        None,
-        u'The ID of the access-control entry.',
-        flag_values=fv)
-    flags.DEFINE_string(
-        'kind',
-        u'storage#bucketAccessControl',
-        u'The kind of item this is. For bucket access control entries, this '
-        u'is always storage#bucketAccessControl.',
-        flag_values=fv)
-    flags.DEFINE_string(
-        'projectTeam',
-        None,
-        u'The project team associated with the entity, if any.',
-        flag_values=fv)
-    flags.DEFINE_string(
-        'role',
-        None,
-        u'The access permission for the entity. Can be READER, WRITER, or '
-        u'OWNER.',
-        flag_values=fv)
-    flags.DEFINE_string(
-        'selfLink',
-        None,
-        u'The link to this access-control entry.',
-        flag_values=fv)
-
-  def RunWithArgs(self, bucket, entity):
-    """Updates an ACL entry on the specified bucket. This method supports
-    patch semantics.
-
-    Args:
-      bucket: The name of the bucket.
-      entity: The entity holding the permission, in one of the following
-        forms:  - user-userId  - user-email  - group-groupId  - group-email  -
-        domain-domain  - project-team-projectId  - allUsers  -
-        allAuthenticatedUsers Examples:  - The user liz@example.com would be
-        user-liz@example.com.  - The group example@googlegroups.com would be
-        group-example@googlegroups.com.  - To refer to all members of the
-        Google Apps for Business domain example.com, the entity would be
-        domain-example.com.
-
-    Flags:
-      domain: The domain associated with the entity, if any.
-      email: The email address associated with the entity, if any.
-      entityId: The ID for the entity, if any.
-      etag: HTTP 1.1 Entity tag for the access-control entry.
-      id: The ID of the access-control entry.
-      kind: The kind of item this is. For bucket access control entries, this
-        is always storage#bucketAccessControl.
-      projectTeam: The project team associated with the entity, if any.
-      role: The access permission for the entity. Can be READER, WRITER, or
-        OWNER.
-      selfLink: The link to this access-control entry.
-    """
-    client = GetClientFromFlags()
-    global_params = GetGlobalParamsFromFlags()
-    request = messages.BucketAccessControl(
-        bucket=bucket.decode('utf8'),
-        entity=entity.decode('utf8'),
-        )
-    if FLAGS['domain'].present:
-      request.domain = FLAGS.domain.decode('utf8')
-    if FLAGS['email'].present:
-      request.email = FLAGS.email.decode('utf8')
-    if FLAGS['entityId'].present:
-      request.entityId = FLAGS.entityId.decode('utf8')
-    if FLAGS['etag'].present:
-      request.etag = FLAGS.etag.decode('utf8')
-    if FLAGS['id'].present:
-      request.id = FLAGS.id.decode('utf8')
-    if FLAGS['kind'].present:
-      request.kind = FLAGS.kind.decode('utf8')
-    if FLAGS['projectTeam'].present:
-      request.projectTeam = apitools_base.JsonToMessage(messages.BucketAccessControl.ProjectTeamValue, FLAGS.projectTeam)
-    if FLAGS['role'].present:
-      request.role = FLAGS.role.decode('utf8')
-    if FLAGS['selfLink'].present:
-      request.selfLink = FLAGS.selfLink.decode('utf8')
-    result = client.bucketAccessControls.Patch(
-        request, global_params=global_params)
-    print apitools_base_cli.FormatOutput(result)
-
-
-class BucketAccessControlsUpdate(apitools_base_cli.NewCmd):
-  """Command wrapping bucketAccessControls.Update."""
-
-  usage = """bucketAccessControls_update <bucket> <entity>"""
-
-  def __init__(self, name, fv):
-    super(BucketAccessControlsUpdate, self).__init__(name, fv)
-    flags.DEFINE_string(
-        'domain',
-        None,
-        u'The domain associated with the entity, if any.',
-        flag_values=fv)
-    flags.DEFINE_string(
-        'email',
-        None,
-        u'The email address associated with the entity, if any.',
-        flag_values=fv)
-    flags.DEFINE_string(
-        'entityId',
-        None,
-        u'The ID for the entity, if any.',
-        flag_values=fv)
-    flags.DEFINE_string(
-        'etag',
-        None,
-        u'HTTP 1.1 Entity tag for the access-control entry.',
-        flag_values=fv)
-    flags.DEFINE_string(
-        'id',
-        None,
-        u'The ID of the access-control entry.',
-        flag_values=fv)
-    flags.DEFINE_string(
-        'kind',
-        u'storage#bucketAccessControl',
-        u'The kind of item this is. For bucket access control entries, this '
-        u'is always storage#bucketAccessControl.',
-        flag_values=fv)
-    flags.DEFINE_string(
-        'projectTeam',
-        None,
-        u'The project team associated with the entity, if any.',
-        flag_values=fv)
-    flags.DEFINE_string(
-        'role',
-        None,
-        u'The access permission for the entity. Can be READER, WRITER, or '
-        u'OWNER.',
-        flag_values=fv)
-    flags.DEFINE_string(
-        'selfLink',
-        None,
-        u'The link to this access-control entry.',
-        flag_values=fv)
-
-  def RunWithArgs(self, bucket, entity):
-    """Updates an ACL entry on the specified bucket.
-
-    Args:
-      bucket: The name of the bucket.
-      entity: The entity holding the permission, in one of the following
-        forms:  - user-userId  - user-email  - group-groupId  - group-email  -
-        domain-domain  - project-team-projectId  - allUsers  -
-        allAuthenticatedUsers Examples:  - The user liz@example.com would be
-        user-liz@example.com.  - The group example@googlegroups.com would be
-        group-example@googlegroups.com.  - To refer to all members of the
-        Google Apps for Business domain example.com, the entity would be
-        domain-example.com.
-
-    Flags:
-      domain: The domain associated with the entity, if any.
-      email: The email address associated with the entity, if any.
-      entityId: The ID for the entity, if any.
-      etag: HTTP 1.1 Entity tag for the access-control entry.
-      id: The ID of the access-control entry.
-      kind: The kind of item this is. For bucket access control entries, this
-        is always storage#bucketAccessControl.
-      projectTeam: The project team associated with the entity, if any.
-      role: The access permission for the entity. Can be READER, WRITER, or
-        OWNER.
-      selfLink: The link to this access-control entry.
-    """
-    client = GetClientFromFlags()
-    global_params = GetGlobalParamsFromFlags()
-    request = messages.BucketAccessControl(
-        bucket=bucket.decode('utf8'),
-        entity=entity.decode('utf8'),
-        )
-    if FLAGS['domain'].present:
-      request.domain = FLAGS.domain.decode('utf8')
-    if FLAGS['email'].present:
-      request.email = FLAGS.email.decode('utf8')
-    if FLAGS['entityId'].present:
-      request.entityId = FLAGS.entityId.decode('utf8')
-    if FLAGS['etag'].present:
-      request.etag = FLAGS.etag.decode('utf8')
-    if FLAGS['id'].present:
-      request.id = FLAGS.id.decode('utf8')
-    if FLAGS['kind'].present:
-      request.kind = FLAGS.kind.decode('utf8')
-    if FLAGS['projectTeam'].present:
-      request.projectTeam = apitools_base.JsonToMessage(messages.BucketAccessControl.ProjectTeamValue, FLAGS.projectTeam)
-    if FLAGS['role'].present:
-      request.role = FLAGS.role.decode('utf8')
-    if FLAGS['selfLink'].present:
-      request.selfLink = FLAGS.selfLink.decode('utf8')
-    result = client.bucketAccessControls.Update(
-        request, global_params=global_params)
-    print apitools_base_cli.FormatOutput(result)
-
-
-class BucketsDelete(apitools_base_cli.NewCmd):
-  """Command wrapping buckets.Delete."""
-
-  usage = """buckets_delete <bucket>"""
-
-  def __init__(self, name, fv):
-    super(BucketsDelete, self).__init__(name, fv)
-    flags.DEFINE_string(
-        'ifMetagenerationMatch',
-        None,
-        u'If set, only deletes the bucket if its metageneration matches this '
-        u'value.',
-        flag_values=fv)
-    flags.DEFINE_string(
-        'ifMetagenerationNotMatch',
-        None,
-        u'If set, only deletes the bucket if its metageneration does not '
-        u'match this value.',
-        flag_values=fv)
-
-  def RunWithArgs(self, bucket):
-    """Permanently deletes an empty bucket.
-
-    Args:
-      bucket: Name of a bucket.
-
-    Flags:
-      ifMetagenerationMatch: If set, only deletes the bucket if its
-        metageneration matches this value.
-      ifMetagenerationNotMatch: If set, only deletes the bucket if its
-        metageneration does not match this value.
-    """
-    client = GetClientFromFlags()
-    global_params = GetGlobalParamsFromFlags()
-    request = messages.StorageBucketsDeleteRequest(
-        bucket=bucket.decode('utf8'),
-        )
-    if FLAGS['ifMetagenerationMatch'].present:
-      request.ifMetagenerationMatch = int(FLAGS.ifMetagenerationMatch)
-    if FLAGS['ifMetagenerationNotMatch'].present:
-      request.ifMetagenerationNotMatch = int(FLAGS.ifMetagenerationNotMatch)
-    result = client.buckets.Delete(
-        request, global_params=global_params)
-    print apitools_base_cli.FormatOutput(result)
-
-
-class BucketsGet(apitools_base_cli.NewCmd):
-  """Command wrapping buckets.Get."""
-
-  usage = """buckets_get <bucket>"""
-
-  def __init__(self, name, fv):
-    super(BucketsGet, self).__init__(name, fv)
-    flags.DEFINE_string(
-        'ifMetagenerationMatch',
-        None,
-        u'Makes the return of the bucket metadata conditional on whether the '
-        u"bucket's current metageneration matches the given value.",
-        flag_values=fv)
-    flags.DEFINE_string(
-        'ifMetagenerationNotMatch',
-        None,
-        u'Makes the return of the bucket metadata conditional on whether the '
-        u"bucket's current metageneration does not match the given value.",
-        flag_values=fv)
-    flags.DEFINE_enum(
-        'projection',
-        u'full',
-        [u'full', u'noAcl'],
-        u'Set of properties to return. Defaults to noAcl.',
-        flag_values=fv)
-
-  def RunWithArgs(self, bucket):
-    """Returns metadata for the specified bucket.
-
-    Args:
-      bucket: Name of a bucket.
-
-    Flags:
-      ifMetagenerationMatch: Makes the return of the bucket metadata
-        conditional on whether the bucket's current metageneration matches the
-        given value.
-      ifMetagenerationNotMatch: Makes the return of the bucket metadata
-        conditional on whether the bucket's current metageneration does not
-        match the given value.
-      projection: Set of properties to return. Defaults to noAcl.
-    """
-    client = GetClientFromFlags()
-    global_params = GetGlobalParamsFromFlags()
-    request = messages.StorageBucketsGetRequest(
-        bucket=bucket.decode('utf8'),
-        )
-    if FLAGS['ifMetagenerationMatch'].present:
-      request.ifMetagenerationMatch = int(FLAGS.ifMetagenerationMatch)
-    if FLAGS['ifMetagenerationNotMatch'].present:
-      request.ifMetagenerationNotMatch = int(FLAGS.ifMetagenerationNotMatch)
-    if FLAGS['projection'].present:
-      request.projection = messages.StorageBucketsGetRequest.ProjectionValueValuesEnum(FLAGS.projection)
-    result = client.buckets.Get(
-        request, global_params=global_params)
-    print apitools_base_cli.FormatOutput(result)
-
-
-class BucketsGetIamPolicy(apitools_base_cli.NewCmd):
-  """Command wrapping buckets.GetIamPolicy."""
-
-  usage = """buckets_getIamPolicy <bucket>"""
-
-  def __init__(self, name, fv):
-    super(BucketsGetIamPolicy, self).__init__(name, fv)
-
-  def RunWithArgs(self, bucket):
-    """Returns an IAM policy for the specified bucket.
-
-    Args:
-      bucket: Name of a bucket.
-    """
-    client = GetClientFromFlags()
-    global_params = GetGlobalParamsFromFlags()
-    request = messages.StorageBucketsGetIamPolicyRequest(
-        bucket=bucket.decode('utf8'),
-        )
-    result = client.buckets.GetIamPolicy(
-        request, global_params=global_params)
-    print apitools_base_cli.FormatOutput(result)
-
-
-class BucketsInsert(apitools_base_cli.NewCmd):
-  """Command wrapping buckets.Insert."""
-
-  usage = """buckets_insert <project>"""
-
-  def __init__(self, name, fv):
-    super(BucketsInsert, self).__init__(name, fv)
-    flags.DEFINE_string(
-        'bucket',
-        None,
-        u'A Bucket resource to be passed as the request body.',
-        flag_values=fv)
-    flags.DEFINE_enum(
-        'predefinedAcl',
-        u'authenticatedRead',
-        [u'authenticatedRead', u'private', u'projectPrivate', u'publicRead', u'publicReadWrite'],
-        u'Apply a predefined set of access controls to this bucket.',
-        flag_values=fv)
-    flags.DEFINE_enum(
-        'predefinedDefaultObjectAcl',
-        u'authenticatedRead',
-        [u'authenticatedRead', u'bucketOwnerFullControl', u'bucketOwnerRead', u'private', u'projectPrivate', u'publicRead'],
-        u'Apply a predefined set of default object access controls to this '
-        u'bucket.',
-        flag_values=fv)
-    flags.DEFINE_enum(
-        'projection',
-        u'full',
-        [u'full', u'noAcl'],
-        u'Set of properties to return. Defaults to noAcl, unless the bucket '
-        u'resource specifies acl or defaultObjectAcl properties, when it '
-        u'defaults to full.',
-        flag_values=fv)
-
-  def RunWithArgs(self, project):
-    """Creates a new bucket.
-
-    Args:
-      project: A valid API project identifier.
-
-    Flags:
-      bucket: A Bucket resource to be passed as the request body.
-      predefinedAcl: Apply a predefined set of access controls to this bucket.
-      predefinedDefaultObjectAcl: Apply a predefined set of default object
-        access controls to this bucket.
-      projection: Set of properties to return. Defaults to noAcl, unless the
-        bucket resource specifies acl or defaultObjectAcl properties, when it
-        defaults to full.
-    """
-    client = GetClientFromFlags()
-    global_params = GetGlobalParamsFromFlags()
-    request = messages.StorageBucketsInsertRequest(
-        project=project.decode('utf8'),
-        )
-    if FLAGS['bucket'].present:
-      request.bucket = apitools_base.JsonToMessage(messages.Bucket, FLAGS.bucket)
-    if FLAGS['predefinedAcl'].present:
-      request.predefinedAcl = messages.StorageBucketsInsertRequest.PredefinedAclValueValuesEnum(FLAGS.predefinedAcl)
-    if FLAGS['predefinedDefaultObjectAcl'].present:
-      request.predefinedDefaultObjectAcl = messages.StorageBucketsInsertRequest.PredefinedDefaultObjectAclValueValuesEnum(FLAGS.predefinedDefaultObjectAcl)
-    if FLAGS['projection'].present:
-      request.projection = messages.StorageBucketsInsertRequest.ProjectionValueValuesEnum(FLAGS.projection)
-    result = client.buckets.Insert(
-        request, global_params=global_params)
-    print apitools_base_cli.FormatOutput(result)
-
-
-class BucketsList(apitools_base_cli.NewCmd):
-  """Command wrapping buckets.List."""
-
-  usage = """buckets_list <project>"""
-
-  def __init__(self, name, fv):
-    super(BucketsList, self).__init__(name, fv)
-    flags.DEFINE_integer(
-        'maxResults',
-        None,
-        u'Maximum number of buckets to return.',
-        flag_values=fv)
-    flags.DEFINE_string(
-        'pageToken',
-        None,
-        u'A previously-returned page token representing part of the larger '
-        u'set of results to view.',
-        flag_values=fv)
-    flags.DEFINE_string(
-        'prefix',
-        None,
-        u'Filter results to buckets whose names begin with this prefix.',
-        flag_values=fv)
-    flags.DEFINE_enum(
-        'projection',
-        u'full',
-        [u'full', u'noAcl'],
-        u'Set of properties to return. Defaults to noAcl.',
-        flag_values=fv)
-
-  def RunWithArgs(self, project):
-    """Retrieves a list of buckets for a given project.
-
-    Args:
-      project: A valid API project identifier.
-
-    Flags:
-      maxResults: Maximum number of buckets to return.
-      pageToken: A previously-returned page token representing part of the
-        larger set of results to view.
-      prefix: Filter results to buckets whose names begin with this prefix.
-      projection: Set of properties to return. Defaults to noAcl.
-    """
-    client = GetClientFromFlags()
-    global_params = GetGlobalParamsFromFlags()
-    request = messages.StorageBucketsListRequest(
-        project=project.decode('utf8'),
-        )
-    if FLAGS['maxResults'].present:
-      request.maxResults = FLAGS.maxResults
-    if FLAGS['pageToken'].present:
-      request.pageToken = FLAGS.pageToken.decode('utf8')
-    if FLAGS['prefix'].present:
-      request.prefix = FLAGS.prefix.decode('utf8')
-    if FLAGS['projection'].present:
-      request.projection = messages.StorageBucketsListRequest.ProjectionValueValuesEnum(FLAGS.projection)
-    result = client.buckets.List(
-        request, global_params=global_params)
-    print apitools_base_cli.FormatOutput(result)
-
-
-class BucketsPatch(apitools_base_cli.NewCmd):
-  """Command wrapping buckets.Patch."""
-
-  usage = """buckets_patch <bucket>"""
-
-  def __init__(self, name, fv):
-    super(BucketsPatch, self).__init__(name, fv)
-    flags.DEFINE_string(
-        'bucketResource',
-        None,
-        u'A Bucket resource to be passed as the request body.',
-        flag_values=fv)
-    flags.DEFINE_string(
-        'ifMetagenerationMatch',
-        None,
-        u'Makes the return of the bucket metadata conditional on whether the '
-        u"bucket's current metageneration matches the given value.",
-        flag_values=fv)
-    flags.DEFINE_string(
-        'ifMetagenerationNotMatch',
-        None,
-        u'Makes the return of the bucket metadata conditional on whether the '
-        u"bucket's current metageneration does not match the given value.",
-        flag_values=fv)
-    flags.DEFINE_enum(
-        'predefinedAcl',
-        u'authenticatedRead',
-        [u'authenticatedRead', u'private', u'projectPrivate', u'publicRead', u'publicReadWrite'],
-        u'Apply a predefined set of access controls to this bucket.',
-        flag_values=fv)
-    flags.DEFINE_enum(
-        'predefinedDefaultObjectAcl',
-        u'authenticatedRead',
-        [u'authenticatedRead', u'bucketOwnerFullControl', u'bucketOwnerRead', u'private', u'projectPrivate', u'publicRead'],
-        u'Apply a predefined set of default object access controls to this '
-        u'bucket.',
-        flag_values=fv)
-    flags.DEFINE_enum(
-        'projection',
-        u'full',
-        [u'full', u'noAcl'],
-        u'Set of properties to return. Defaults to full.',
-        flag_values=fv)
-
-  def RunWithArgs(self, bucket):
-    """Updates a bucket. This method supports patch semantics.
-
-    Args:
-      bucket: Name of a bucket.
-
-    Flags:
-      bucketResource: A Bucket resource to be passed as the request body.
-      ifMetagenerationMatch: Makes the return of the bucket metadata
-        conditional on whether the bucket's current metageneration matches the
-        given value.
-      ifMetagenerationNotMatch: Makes the return of the bucket metadata
-        conditional on whether the bucket's current metageneration does not
-        match the given value.
-      predefinedAcl: Apply a predefined set of access controls to this bucket.
-      predefinedDefaultObjectAcl: Apply a predefined set of default object
-        access controls to this bucket.
-      projection: Set of properties to return. Defaults to full.
-    """
-    client = GetClientFromFlags()
-    global_params = GetGlobalParamsFromFlags()
-    request = messages.StorageBucketsPatchRequest(
-        bucket=bucket.decode('utf8'),
-        )
-    if FLAGS['bucketResource'].present:
-      request.bucketResource = apitools_base.JsonToMessage(messages.Bucket, FLAGS.bucketResource)
-    if FLAGS['ifMetagenerationMatch'].present:
-      request.ifMetagenerationMatch = int(FLAGS.ifMetagenerationMatch)
-    if FLAGS['ifMetagenerationNotMatch'].present:
-      request.ifMetagenerationNotMatch = int(FLAGS.ifMetagenerationNotMatch)
-    if FLAGS['predefinedAcl'].present:
-      request.predefinedAcl = messages.StorageBucketsPatchRequest.PredefinedAclValueValuesEnum(FLAGS.predefinedAcl)
-    if FLAGS['predefinedDefaultObjectAcl'].present:
-      request.predefinedDefaultObjectAcl = messages.StorageBucketsPatchRequest.PredefinedDefaultObjectAclValueValuesEnum(FLAGS.predefinedDefaultObjectAcl)
-    if FLAGS['projection'].present:
-      request.projection = messages.StorageBucketsPatchRequest.ProjectionValueValuesEnum(FLAGS.projection)
-    result = client.buckets.Patch(
-        request, global_params=global_params)
-    print apitools_base_cli.FormatOutput(result)
-
-
-class BucketsSetIamPolicy(apitools_base_cli.NewCmd):
-  """Command wrapping buckets.SetIamPolicy."""
-
-  usage = """buckets_setIamPolicy <bucket>"""
-
-  def __init__(self, name, fv):
-    super(BucketsSetIamPolicy, self).__init__(name, fv)
-    flags.DEFINE_string(
-        'policy',
-        None,
-        u'A Policy resource to be passed as the request body.',
-        flag_values=fv)
-
-  def RunWithArgs(self, bucket):
-    """Updates an IAM policy for the specified bucket.
-
-    Args:
-      bucket: Name of a bucket.
-
-    Flags:
-      policy: A Policy resource to be passed as the request body.
-    """
-    client = GetClientFromFlags()
-    global_params = GetGlobalParamsFromFlags()
-    request = messages.StorageBucketsSetIamPolicyRequest(
-        bucket=bucket.decode('utf8'),
-        )
-    if FLAGS['policy'].present:
-      request.policy = apitools_base.JsonToMessage(messages.Policy, FLAGS.policy)
-    result = client.buckets.SetIamPolicy(
-        request, global_params=global_params)
-    print apitools_base_cli.FormatOutput(result)
-
-
-class BucketsTestIamPermissions(apitools_base_cli.NewCmd):
-  """Command wrapping buckets.TestIamPermissions."""
-
-  usage = """buckets_testIamPermissions <bucket> <permissions>"""
-
-  def __init__(self, name, fv):
-    super(BucketsTestIamPermissions, self).__init__(name, fv)
-
-  def RunWithArgs(self, bucket, permissions):
-    """Tests a set of permissions on the given bucket to see which, if any,
-    are held by the caller.
-
-    Args:
-      bucket: Name of a bucket.
-      permissions: Permissions to test.
-    """
-    client = GetClientFromFlags()
-    global_params = GetGlobalParamsFromFlags()
-    request = messages.StorageBucketsTestIamPermissionsRequest(
-        bucket=bucket.decode('utf8'),
-        permissions=permissions.decode('utf8'),
-        )
-    result = client.buckets.TestIamPermissions(
-        request, global_params=global_params)
-    print apitools_base_cli.FormatOutput(result)
-
-
-class BucketsUpdate(apitools_base_cli.NewCmd):
-  """Command wrapping buckets.Update."""
-
-  usage = """buckets_update <bucket>"""
-
-  def __init__(self, name, fv):
-    super(BucketsUpdate, self).__init__(name, fv)
-    flags.DEFINE_string(
-        'bucketResource',
-        None,
-        u'A Bucket resource to be passed as the request body.',
-        flag_values=fv)
-    flags.DEFINE_string(
-        'ifMetagenerationMatch',
-        None,
-        u'Makes the return of the bucket metadata conditional on whether the '
-        u"bucket's current metageneration matches the given value.",
-        flag_values=fv)
-    flags.DEFINE_string(
-        'ifMetagenerationNotMatch',
-        None,
-        u'Makes the return of the bucket metadata conditional on whether the '
-        u"bucket's current metageneration does not match the given value.",
-        flag_values=fv)
-    flags.DEFINE_enum(
-        'predefinedAcl',
-        u'authenticatedRead',
-        [u'authenticatedRead', u'private', u'projectPrivate', u'publicRead', u'publicReadWrite'],
-        u'Apply a predefined set of access controls to this bucket.',
-        flag_values=fv)
-    flags.DEFINE_enum(
-        'predefinedDefaultObjectAcl',
-        u'authenticatedRead',
-        [u'authenticatedRead', u'bucketOwnerFullControl', u'bucketOwnerRead', u'private', u'projectPrivate', u'publicRead'],
-        u'Apply a predefined set of default object access controls to this '
-        u'bucket.',
-        flag_values=fv)
-    flags.DEFINE_enum(
-        'projection',
-        u'full',
-        [u'full', u'noAcl'],
-        u'Set of properties to return. Defaults to full.',
-        flag_values=fv)
-
-  def RunWithArgs(self, bucket):
-    """Updates a bucket.
-
-    Args:
-      bucket: Name of a bucket.
-
-    Flags:
-      bucketResource: A Bucket resource to be passed as the request body.
-      ifMetagenerationMatch: Makes the return of the bucket metadata
-        conditional on whether the bucket's current metageneration matches the
-        given value.
-      ifMetagenerationNotMatch: Makes the return of the bucket metadata
-        conditional on whether the bucket's current metageneration does not
-        match the given value.
-      predefinedAcl: Apply a predefined set of access controls to this bucket.
-      predefinedDefaultObjectAcl: Apply a predefined set of default object
-        access controls to this bucket.
-      projection: Set of properties to return. Defaults to full.
-    """
-    client = GetClientFromFlags()
-    global_params = GetGlobalParamsFromFlags()
-    request = messages.StorageBucketsUpdateRequest(
-        bucket=bucket.decode('utf8'),
-        )
-    if FLAGS['bucketResource'].present:
-      request.bucketResource = apitools_base.JsonToMessage(messages.Bucket, FLAGS.bucketResource)
-    if FLAGS['ifMetagenerationMatch'].present:
-      request.ifMetagenerationMatch = int(FLAGS.ifMetagenerationMatch)
-    if FLAGS['ifMetagenerationNotMatch'].present:
-      request.ifMetagenerationNotMatch = int(FLAGS.ifMetagenerationNotMatch)
-    if FLAGS['predefinedAcl'].present:
-      request.predefinedAcl = messages.StorageBucketsUpdateRequest.PredefinedAclValueValuesEnum(FLAGS.predefinedAcl)
-    if FLAGS['predefinedDefaultObjectAcl'].present:
-      request.predefinedDefaultObjectAcl = messages.StorageBucketsUpdateRequest.PredefinedDefaultObjectAclValueValuesEnum(FLAGS.predefinedDefaultObjectAcl)
-    if FLAGS['projection'].present:
-      request.projection = messages.StorageBucketsUpdateRequest.ProjectionValueValuesEnum(FLAGS.projection)
-    result = client.buckets.Update(
-        request, global_params=global_params)
-    print apitools_base_cli.FormatOutput(result)
-
-
-class ChannelsStop(apitools_base_cli.NewCmd):
-  """Command wrapping channels.Stop."""
-
-  usage = """channels_stop"""
-
-  def __init__(self, name, fv):
-    super(ChannelsStop, self).__init__(name, fv)
-    flags.DEFINE_string(
-        'address',
-        None,
-        u'The address where notifications are delivered for this channel.',
-        flag_values=fv)
-    flags.DEFINE_string(
-        'expiration',
-        None,
-        u'Date and time of notification channel expiration, expressed as a '
-        u'Unix timestamp, in milliseconds. Optional.',
-        flag_values=fv)
-    flags.DEFINE_string(
-        'id',
-        None,
-        u'A UUID or similar unique string that identifies this channel.',
-        flag_values=fv)
-    flags.DEFINE_string(
-        'kind',
-        u'api#channel',
-        u'Identifies this as a notification channel used to watch for changes'
-        u' to a resource. Value: the fixed string "api#channel".',
-        flag_values=fv)
-    flags.DEFINE_string(
-        'params',
-        None,
-        u'Additional parameters controlling delivery channel behavior. '
-        u'Optional.',
-        flag_values=fv)
-    flags.DEFINE_boolean(
-        'payload',
-        None,
-        u'A Boolean value to indicate whether payload is wanted. Optional.',
-        flag_values=fv)
-    flags.DEFINE_string(
-        'resourceId',
-        None,
-        u'An opaque ID that identifies the resource being watched on this '
-        u'channel. Stable across different API versions.',
-        flag_values=fv)
-    flags.DEFINE_string(
-        'resourceUri',
-        None,
-        u'A version-specific identifier for the watched resource.',
-        flag_values=fv)
-    flags.DEFINE_string(
-        'token',
-        None,
-        u'An arbitrary string delivered to the target address with each '
-        u'notification delivered over this channel. Optional.',
-        flag_values=fv)
-    flags.DEFINE_string(
-        'type',
-        None,
-        u'The type of delivery mechanism used for this channel.',
-        flag_values=fv)
-
-  def RunWithArgs(self):
-    """Stop watching resources through this channel
-
-    Flags:
-      address: The address where notifications are delivered for this channel.
-      expiration: Date and time of notification channel expiration, expressed
-        as a Unix timestamp, in milliseconds. Optional.
-      id: A UUID or similar unique string that identifies this channel.
-      kind: Identifies this as a notification channel used to watch for
-        changes to a resource. Value: the fixed string "api#channel".
-      params: Additional parameters controlling delivery channel behavior.
-        Optional.
-      payload: A Boolean value to indicate whether payload is wanted.
-        Optional.
-      resourceId: An opaque ID that identifies the resource being watched on
-        this channel. Stable across different API versions.
-      resourceUri: A version-specific identifier for the watched resource.
-      token: An arbitrary string delivered to the target address with each
-        notification delivered over this channel. Optional.
-      type: The type of delivery mechanism used for this channel.
-    """
-    client = GetClientFromFlags()
-    global_params = GetGlobalParamsFromFlags()
-    request = messages.Channel(
-        )
-    if FLAGS['address'].present:
-      request.address = FLAGS.address.decode('utf8')
-    if FLAGS['expiration'].present:
-      request.expiration = int(FLAGS.expiration)
-    if FLAGS['id'].present:
-      request.id = FLAGS.id.decode('utf8')
-    if FLAGS['kind'].present:
-      request.kind = FLAGS.kind.decode('utf8')
-    if FLAGS['params'].present:
-      request.params = apitools_base.JsonToMessage(messages.Channel.ParamsValue, FLAGS.params)
-    if FLAGS['payload'].present:
-      request.payload = FLAGS.payload
-    if FLAGS['resourceId'].present:
-      request.resourceId = FLAGS.resourceId.decode('utf8')
-    if FLAGS['resourceUri'].present:
-      request.resourceUri = FLAGS.resourceUri.decode('utf8')
-    if FLAGS['token'].present:
-      request.token = FLAGS.token.decode('utf8')
-    if FLAGS['type'].present:
-      request.type = FLAGS.type.decode('utf8')
-    result = client.channels.Stop(
-        request, global_params=global_params)
-    print apitools_base_cli.FormatOutput(result)
-
-
-class DefaultObjectAccessControlsDelete(apitools_base_cli.NewCmd):
-  """Command wrapping defaultObjectAccessControls.Delete."""
-
-  usage = """defaultObjectAccessControls_delete <bucket> <entity>"""
-
-  def __init__(self, name, fv):
-    super(DefaultObjectAccessControlsDelete, self).__init__(name, fv)
-
-  def RunWithArgs(self, bucket, entity):
-    """Permanently deletes the default object ACL entry for the specified
-    entity on the specified bucket.
-
-    Args:
-      bucket: Name of a bucket.
-      entity: The entity holding the permission. Can be user-userId, user-
-        emailAddress, group-groupId, group-emailAddress, allUsers, or
-        allAuthenticatedUsers.
-    """
-    client = GetClientFromFlags()
-    global_params = GetGlobalParamsFromFlags()
-    request = messages.StorageDefaultObjectAccessControlsDeleteRequest(
-        bucket=bucket.decode('utf8'),
-        entity=entity.decode('utf8'),
-        )
-    result = client.defaultObjectAccessControls.Delete(
-        request, global_params=global_params)
-    print apitools_base_cli.FormatOutput(result)
-
-
-class DefaultObjectAccessControlsGet(apitools_base_cli.NewCmd):
-  """Command wrapping defaultObjectAccessControls.Get."""
-
-  usage = """defaultObjectAccessControls_get <bucket> <entity>"""
-
-  def __init__(self, name, fv):
-    super(DefaultObjectAccessControlsGet, self).__init__(name, fv)
-
-  def RunWithArgs(self, bucket, entity):
-    """Returns the default object ACL entry for the specified entity on the
-    specified bucket.
-
-    Args:
-      bucket: Name of a bucket.
-      entity: The entity holding the permission. Can be user-userId, user-
-        emailAddress, group-groupId, group-emailAddress, allUsers, or
-        allAuthenticatedUsers.
-    """
-    client = GetClientFromFlags()
-    global_params = GetGlobalParamsFromFlags()
-    request = messages.StorageDefaultObjectAccessControlsGetRequest(
-        bucket=bucket.decode('utf8'),
-        entity=entity.decode('utf8'),
-        )
-    result = client.defaultObjectAccessControls.Get(
-        request, global_params=global_params)
-    print apitools_base_cli.FormatOutput(result)
-
-
-class DefaultObjectAccessControlsInsert(apitools_base_cli.NewCmd):
-  """Command wrapping defaultObjectAccessControls.Insert."""
-
-  usage = """defaultObjectAccessControls_insert <bucket>"""
-
-  def __init__(self, name, fv):
-    super(DefaultObjectAccessControlsInsert, self).__init__(name, fv)
-    flags.DEFINE_string(
-        'domain',
-        None,
-        u'The domain associated with the entity, if any.',
-        flag_values=fv)
-    flags.DEFINE_string(
-        'email',
-        None,
-        u'The email address associated with the entity, if any.',
-        flag_values=fv)
-    flags.DEFINE_string(
-        'entity',
-        None,
-        u'The entity holding the permission, in one of the following forms:  '
-        u'- user-userId  - user-email  - group-groupId  - group-email  - '
-        u'domain-domain  - project-team-projectId  - allUsers  - '
-        u'allAuthenticatedUsers Examples:  - The user liz@example.com would '
-        u'be user-liz@example.com.  - The group example@googlegroups.com '
-        u'would be group-example@googlegroups.com.  - To refer to all members'
-        u' of the Google Apps for Business domain example.com, the entity '
-        u'would be domain-example.com.',
-        flag_values=fv)
-    flags.DEFINE_string(
-        'entityId',
-        None,
-        u'The ID for the entity, if any.',
-        flag_values=fv)
-    flags.DEFINE_string(
-        'etag',
-        None,
-        u'HTTP 1.1 Entity tag for the access-control entry.',
-        flag_values=fv)
-    flags.DEFINE_string(
-        'generation',
-        None,
-        u'The content generation of the object.',
-        flag_values=fv)
-    flags.DEFINE_string(
-        'id',
-        None,
-        u'The ID of the access-control entry.',
-        flag_values=fv)
-    flags.DEFINE_string(
-        'kind',
-        u'storage#objectAccessControl',
-        u'The kind of item this is. For object access control entries, this '
-        u'is always storage#objectAccessControl.',
-        flag_values=fv)
-    flags.DEFINE_string(
-        'object',
-        None,
-        u'The name of the object.',
-        flag_values=fv)
-    flags.DEFINE_string(
-        'projectTeam',
-        None,
-        u'The project team associated with the entity, if any.',
-        flag_values=fv)
-    flags.DEFINE_string(
-        'role',
-        None,
-        u'The access permission for the entity. Can be READER or OWNER.',
-        flag_values=fv)
-    flags.DEFINE_string(
-        'selfLink',
-        None,
-        u'The link to this access-control entry.',
-        flag_values=fv)
-
-  def RunWithArgs(self, bucket):
-    """Creates a new default object ACL entry on the specified bucket.
-
-    Args:
-      bucket: The name of the bucket.
-
-    Flags:
-      domain: The domain associated with the entity, if any.
-      email: The email address associated with the entity, if any.
-      entity: The entity holding the permission, in one of the following
-        forms:  - user-userId  - user-email  - group-groupId  - group-email  -
-        domain-domain  - project-team-projectId  - allUsers  -
-        allAuthenticatedUsers Examples:  - The user liz@example.com would be
-        user-liz@example.com.  - The group example@googlegroups.com would be
-        group-example@googlegroups.com.  - To refer to all members of the
-        Google Apps for Business domain example.com, the entity would be
-        domain-example.com.
-      entityId: The ID for the entity, if any.
-      etag: HTTP 1.1 Entity tag for the access-control entry.
-      generation: The content generation of the object.
-      id: The ID of the access-control entry.
-      kind: The kind of item this is. For object access control entries, this
-        is always storage#objectAccessControl.
-      object: The name of the object.
-      projectTeam: The project team associated with the entity, if any.
-      role: The access permission for the entity. Can be READER or OWNER.
-      selfLink: The link to this access-control entry.
-    """
-    client = GetClientFromFlags()
-    global_params = GetGlobalParamsFromFlags()
-    request = messages.ObjectAccessControl(
-        bucket=bucket.decode('utf8'),
-        )
-    if FLAGS['domain'].present:
-      request.domain = FLAGS.domain.decode('utf8')
-    if FLAGS['email'].present:
-      request.email = FLAGS.email.decode('utf8')
-    if FLAGS['entity'].present:
-      request.entity = FLAGS.entity.decode('utf8')
-    if FLAGS['entityId'].present:
-      request.entityId = FLAGS.entityId.decode('utf8')
-    if FLAGS['etag'].present:
-      request.etag = FLAGS.etag.decode('utf8')
-    if FLAGS['generation'].present:
-      request.generation = int(FLAGS.generation)
-    if FLAGS['id'].present:
-      request.id = FLAGS.id.decode('utf8')
-    if FLAGS['kind'].present:
-      request.kind = FLAGS.kind.decode('utf8')
-    if FLAGS['object'].present:
-      request.object = FLAGS.object.decode('utf8')
-    if FLAGS['projectTeam'].present:
-      request.projectTeam = apitools_base.JsonToMessage(messages.ObjectAccessControl.ProjectTeamValue, FLAGS.projectTeam)
-    if FLAGS['role'].present:
-      request.role = FLAGS.role.decode('utf8')
-    if FLAGS['selfLink'].present:
-      request.selfLink = FLAGS.selfLink.decode('utf8')
-    result = client.defaultObjectAccessControls.Insert(
-        request, global_params=global_params)
-    print apitools_base_cli.FormatOutput(result)
-
-
-class DefaultObjectAccessControlsList(apitools_base_cli.NewCmd):
-  """Command wrapping defaultObjectAccessControls.List."""
-
-  usage = """defaultObjectAccessControls_list <bucket>"""
-
-  def __init__(self, name, fv):
-    super(DefaultObjectAccessControlsList, self).__init__(name, fv)
-    flags.DEFINE_string(
-        'ifMetagenerationMatch',
-        None,
-        u"If present, only return default ACL listing if the bucket's current"
-        u' metageneration matches this value.',
-        flag_values=fv)
-    flags.DEFINE_string(
-        'ifMetagenerationNotMatch',
-        None,
-        u"If present, only return default ACL listing if the bucket's current"
-        u' metageneration does not match the given value.',
-        flag_values=fv)
-
-  def RunWithArgs(self, bucket):
-    """Retrieves default object ACL entries on the specified bucket.
-
-    Args:
-      bucket: Name of a bucket.
-
-    Flags:
-      ifMetagenerationMatch: If present, only return default ACL listing if
-        the bucket's current metageneration matches this value.
-      ifMetagenerationNotMatch: If present, only return default ACL listing if
-        the bucket's current metageneration does not match the given value.
-    """
-    client = GetClientFromFlags()
-    global_params = GetGlobalParamsFromFlags()
-    request = messages.StorageDefaultObjectAccessControlsListRequest(
-        bucket=bucket.decode('utf8'),
-        )
-    if FLAGS['ifMetagenerationMatch'].present:
-      request.ifMetagenerationMatch = int(FLAGS.ifMetagenerationMatch)
-    if FLAGS['ifMetagenerationNotMatch'].present:
-      request.ifMetagenerationNotMatch = int(FLAGS.ifMetagenerationNotMatch)
-    result = client.defaultObjectAccessControls.List(
-        request, global_params=global_params)
-    print apitools_base_cli.FormatOutput(result)
-
-
-class DefaultObjectAccessControlsPatch(apitools_base_cli.NewCmd):
-  """Command wrapping defaultObjectAccessControls.Patch."""
-
-  usage = """defaultObjectAccessControls_patch <bucket> <entity>"""
-
-  def __init__(self, name, fv):
-    super(DefaultObjectAccessControlsPatch, self).__init__(name, fv)
-    flags.DEFINE_string(
-        'domain',
-        None,
-        u'The domain associated with the entity, if any.',
-        flag_values=fv)
-    flags.DEFINE_string(
-        'email',
-        None,
-        u'The email address associated with the entity, if any.',
-        flag_values=fv)
-    flags.DEFINE_string(
-        'entityId',
-        None,
-        u'The ID for the entity, if any.',
-        flag_values=fv)
-    flags.DEFINE_string(
-        'etag',
-        None,
-        u'HTTP 1.1 Entity tag for the access-control entry.',
-        flag_values=fv)
-    flags.DEFINE_string(
-        'generation',
-        None,
-        u'The content generation of the object.',
-        flag_values=fv)
-    flags.DEFINE_string(
-        'id',
-        None,
-        u'The ID of the access-control entry.',
-        flag_values=fv)
-    flags.DEFINE_string(
-        'kind',
-        u'storage#objectAccessControl',
-        u'The kind of item this is. For object access control entries, this '
-        u'is always storage#objectAccessControl.',
-        flag_values=fv)
-    flags.DEFINE_string(
-        'object',
-        None,
-        u'The name of the object.',
-        flag_values=fv)
-    flags.DEFINE_string(
-        'projectTeam',
-        None,
-        u'The project team associated with the entity, if any.',
-        flag_values=fv)
-    flags.DEFINE_string(
-        'role',
-        None,
-        u'The access permission for the entity. Can be READER or OWNER.',
-        flag_values=fv)
-    flags.DEFINE_string(
-        'selfLink',
-        None,
-        u'The link to this access-control entry.',
-        flag_values=fv)
-
-  def RunWithArgs(self, bucket, entity):
-    """Updates a default object ACL entry on the specified bucket. This method
-    supports patch semantics.
-
-    Args:
-      bucket: The name of the bucket.
-      entity: The entity holding the permission, in one of the following
-        forms:  - user-userId  - user-email  - group-groupId  - group-email  -
-        domain-domain  - project-team-projectId  - allUsers  -
-        allAuthenticatedUsers Examples:  - The user liz@example.com would be
-        user-liz@example.com.  - The group example@googlegroups.com would be
-        group-example@googlegroups.com.  - To refer to all members of the
-        Google Apps for Business domain example.com, the entity would be
-        domain-example.com.
-
-    Flags:
-      domain: The domain associated with the entity, if any.
-      email: The email address associated with the entity, if any.
-      entityId: The ID for the entity, if any.
-      etag: HTTP 1.1 Entity tag for the access-control entry.
-      generation: The content generation of the object.
-      id: The ID of the access-control entry.
-      kind: The kind of item this is. For object access control entries, this
-        is always storage#objectAccessControl.
-      object: The name of the object.
-      projectTeam: The project team associated with the entity, if any.
-      role: The access permission for the entity. Can be READER or OWNER.
-      selfLink: The link to this access-control entry.
-    """
-    client = GetClientFromFlags()
-    global_params = GetGlobalParamsFromFlags()
-    request = messages.ObjectAccessControl(
-        bucket=bucket.decode('utf8'),
-        entity=entity.decode('utf8'),
-        )
-    if FLAGS['domain'].present:
-      request.domain = FLAGS.domain.decode('utf8')
-    if FLAGS['email'].present:
-      request.email = FLAGS.email.decode('utf8')
-    if FLAGS['entityId'].present:
-      request.entityId = FLAGS.entityId.decode('utf8')
-    if FLAGS['etag'].present:
-      request.etag = FLAGS.etag.decode('utf8')
-    if FLAGS['generation'].present:
-      request.generation = int(FLAGS.generation)
-    if FLAGS['id'].present:
-      request.id = FLAGS.id.decode('utf8')
-    if FLAGS['kind'].present:
-      request.kind = FLAGS.kind.decode('utf8')
-    if FLAGS['object'].present:
-      request.object = FLAGS.object.decode('utf8')
-    if FLAGS['projectTeam'].present:
-      request.projectTeam = apitools_base.JsonToMessage(messages.ObjectAccessControl.ProjectTeamValue, FLAGS.projectTeam)
-    if FLAGS['role'].present:
-      request.role = FLAGS.role.decode('utf8')
-    if FLAGS['selfLink'].present:
-      request.selfLink = FLAGS.selfLink.decode('utf8')
-    result = client.defaultObjectAccessControls.Patch(
-        request, global_params=global_params)
-    print apitools_base_cli.FormatOutput(result)
-
-
-class DefaultObjectAccessControlsUpdate(apitools_base_cli.NewCmd):
-  """Command wrapping defaultObjectAccessControls.Update."""
-
-  usage = """defaultObjectAccessControls_update <bucket> <entity>"""
-
-  def __init__(self, name, fv):
-    super(DefaultObjectAccessControlsUpdate, self).__init__(name, fv)
-    flags.DEFINE_string(
-        'domain',
-        None,
-        u'The domain associated with the entity, if any.',
-        flag_values=fv)
-    flags.DEFINE_string(
-        'email',
-        None,
-        u'The email address associated with the entity, if any.',
-        flag_values=fv)
-    flags.DEFINE_string(
-        'entityId',
-        None,
-        u'The ID for the entity, if any.',
-        flag_values=fv)
-    flags.DEFINE_string(
-        'etag',
-        None,
-        u'HTTP 1.1 Entity tag for the access-control entry.',
-        flag_values=fv)
-    flags.DEFINE_string(
-        'generation',
-        None,
-        u'The content generation of the object.',
-        flag_values=fv)
-    flags.DEFINE_string(
-        'id',
-        None,
-        u'The ID of the access-control entry.',
-        flag_values=fv)
-    flags.DEFINE_string(
-        'kind',
-        u'storage#objectAccessControl',
-        u'The kind of item this is. For object access control entries, this '
-        u'is always storage#objectAccessControl.',
-        flag_values=fv)
-    flags.DEFINE_string(
-        'object',
-        None,
-        u'The name of the object.',
-        flag_values=fv)
-    flags.DEFINE_string(
-        'projectTeam',
-        None,
-        u'The project team associated with the entity, if any.',
-        flag_values=fv)
-    flags.DEFINE_string(
-        'role',
-        None,
-        u'The access permission for the entity. Can be READER or OWNER.',
-        flag_values=fv)
-    flags.DEFINE_string(
-        'selfLink',
-        None,
-        u'The link to this access-control entry.',
-        flag_values=fv)
-
-  def RunWithArgs(self, bucket, entity):
-    """Updates a default object ACL entry on the specified bucket.
-
-    Args:
-      bucket: The name of the bucket.
-      entity: The entity holding the permission, in one of the following
-        forms:  - user-userId  - user-email  - group-groupId  - group-email  -
-        domain-domain  - project-team-projectId  - allUsers  -
-        allAuthenticatedUsers Examples:  - The user liz@example.com would be
-        user-liz@example.com.  - The group example@googlegroups.com would be
-        group-example@googlegroups.com.  - To refer to all members of the
-        Google Apps for Business domain example.com, the entity would be
-        domain-example.com.
-
-    Flags:
-      domain: The domain associated with the entity, if any.
-      email: The email address associated with the entity, if any.
-      entityId: The ID for the entity, if any.
-      etag: HTTP 1.1 Entity tag for the access-control entry.
-      generation: The content generation of the object.
-      id: The ID of the access-control entry.
-      kind: The kind of item this is. For object access control entries, this
-        is always storage#objectAccessControl.
-      object: The name of the object.
-      projectTeam: The project team associated with the entity, if any.
-      role: The access permission for the entity. Can be READER or OWNER.
-      selfLink: The link to this access-control entry.
-    """
-    client = GetClientFromFlags()
-    global_params = GetGlobalParamsFromFlags()
-    request = messages.ObjectAccessControl(
-        bucket=bucket.decode('utf8'),
-        entity=entity.decode('utf8'),
-        )
-    if FLAGS['domain'].present:
-      request.domain = FLAGS.domain.decode('utf8')
-    if FLAGS['email'].present:
-      request.email = FLAGS.email.decode('utf8')
-    if FLAGS['entityId'].present:
-      request.entityId = FLAGS.entityId.decode('utf8')
-    if FLAGS['etag'].present:
-      request.etag = FLAGS.etag.decode('utf8')
-    if FLAGS['generation'].present:
-      request.generation = int(FLAGS.generation)
-    if FLAGS['id'].present:
-      request.id = FLAGS.id.decode('utf8')
-    if FLAGS['kind'].present:
-      request.kind = FLAGS.kind.decode('utf8')
-    if FLAGS['object'].present:
-      request.object = FLAGS.object.decode('utf8')
-    if FLAGS['projectTeam'].present:
-      request.projectTeam = apitools_base.JsonToMessage(messages.ObjectAccessControl.ProjectTeamValue, FLAGS.projectTeam)
-    if FLAGS['role'].present:
-      request.role = FLAGS.role.decode('utf8')
-    if FLAGS['selfLink'].present:
-      request.selfLink = FLAGS.selfLink.decode('utf8')
-    result = client.defaultObjectAccessControls.Update(
-        request, global_params=global_params)
-    print apitools_base_cli.FormatOutput(result)
-
-
-class NotificationsDelete(apitools_base_cli.NewCmd):
-  """Command wrapping notifications.Delete."""
-
-  usage = """notifications_delete <notification>"""
-
-  def __init__(self, name, fv):
-    super(NotificationsDelete, self).__init__(name, fv)
-
-  def RunWithArgs(self, notification):
-    """Permanently deletes a notification subscription.
-
-    Args:
-      notification: ID of the notification to delete.
-    """
-    client = GetClientFromFlags()
-    global_params = GetGlobalParamsFromFlags()
-    request = messages.StorageNotificationsDeleteRequest(
-        notification=notification.decode('utf8'),
-        )
-    result = client.notifications.Delete(
-        request, global_params=global_params)
-    print apitools_base_cli.FormatOutput(result)
-
-
-class NotificationsGet(apitools_base_cli.NewCmd):
-  """Command wrapping notifications.Get."""
-
-  usage = """notifications_get <notification>"""
-
-  def __init__(self, name, fv):
-    super(NotificationsGet, self).__init__(name, fv)
-
-  def RunWithArgs(self, notification):
-    """View a notification configuration.
-
-    Args:
-      notification: Notification ID
-    """
-    client = GetClientFromFlags()
-    global_params = GetGlobalParamsFromFlags()
-    request = messages.StorageNotificationsGetRequest(
-        notification=notification.decode('utf8'),
-        )
-    result = client.notifications.Get(
-        request, global_params=global_params)
-    print apitools_base_cli.FormatOutput(result)
-
-
-class NotificationsInsert(apitools_base_cli.NewCmd):
-  """Command wrapping notifications.Insert."""
-
-  usage = """notifications_insert"""
-
-  def __init__(self, name, fv):
-    super(NotificationsInsert, self).__init__(name, fv)
-    flags.DEFINE_string(
-        'bucket',
-        None,
-        u'The name of the bucket this subscription is particular to.',
-        flag_values=fv)
-    flags.DEFINE_string(
-        'custom_attributes',
-        None,
-        u'An optional list of additional attributes to attach to each Cloud '
-        u'PubSub message published for this notification subscription.',
-        flag_values=fv)
-    flags.DEFINE_string(
-        'etag',
-        None,
-        u'HTTP 1.1 Entity tag for this subscription notification.',
-        flag_values=fv)
-    flags.DEFINE_string(
-        'event_types',
-        None,
-        u'If present, only send notifications about listed event types. If '
-        u'empty, sent notifications for all event types.',
-        flag_values=fv)
-    flags.DEFINE_string(
-        'id',
-        None,
-        u'The ID of the notification.',
-        flag_values=fv)
-    flags.DEFINE_string(
-        'kind',
-        u'storage#notification',
-        u'The kind of item this is. For notifications, this is always '
-        u'storage#notification.',
-        flag_values=fv)
-    flags.DEFINE_string(
-        'object_metadata_format',
-        u'JSON_API_V1',
-        u'If payload_content is OBJECT_METADATA, controls the format of that '
-        u'metadata. Otherwise, must not be set.',
-        flag_values=fv)
-    flags.DEFINE_string(
-        'object_name_prefix',
-        None,
-        u'If present, only apply this notification configuration to object '
-        u'names that begin with this prefix.',
-        flag_values=fv)
-    flags.DEFINE_string(
-        'payload_content',
-        u'OBJECT_METADATA',
-        u'The desired content of the Payload. Defaults to OBJECT_METADATA.',
-        flag_values=fv)
-    flags.DEFINE_string(
-        'selfLink',
-        None,
-        u'The canonical URL of this notification.',
-        flag_values=fv)
-    flags.DEFINE_string(
-        'topic',
-        None,
-        u'The Cloud PubSub topic to which this subscription publishes. '
-        u"Formatted as: '//pubsub.googleapis.com/projects/{project-"
-        u"identifier}/topics/{my-topic}'",
-        flag_values=fv)
-
-  def RunWithArgs(self):
-    """Creates a notification subscription for a given bucket.
-
-    Flags:
-      bucket: The name of the bucket this subscription is particular to.
-      custom_attributes: An optional list of additional attributes to attach
-        to each Cloud PubSub message published for this notification
-        subscription.
-      etag: HTTP 1.1 Entity tag for this subscription notification.
-      event_types: If present, only send notifications about listed event
-        types. If empty, sent notifications for all event types.
-      id: The ID of the notification.
-      kind: The kind of item this is. For notifications, this is always
-        storage#notification.
-      object_metadata_format: If payload_content is OBJECT_METADATA, controls
-        the format of that metadata. Otherwise, must not be set.
-      object_name_prefix: If present, only apply this notification
-        configuration to object names that begin with this prefix.
-      payload_content: The desired content of the Payload. Defaults to
-        OBJECT_METADATA.
-      selfLink: The canonical URL of this notification.
-      topic: The Cloud PubSub topic to which this subscription publishes.
-        Formatted as: '//pubsub.googleapis.com/projects/{project-
-        identifier}/topics/{my-topic}'
-    """
-    client = GetClientFromFlags()
-    global_params = GetGlobalParamsFromFlags()
-    request = messages.Notification(
-        )
-    if FLAGS['bucket'].present:
-      request.bucket = FLAGS.bucket.decode('utf8')
-    if FLAGS['custom_attributes'].present:
-      request.custom_attributes = apitools_base.JsonToMessage(messages.Notification.CustomAttributesValue, FLAGS.custom_attributes)
-    if FLAGS['etag'].present:
-      request.etag = FLAGS.etag.decode('utf8')
-    if FLAGS['event_types'].present:
-      request.event_types = [x.decode('utf8') for x in FLAGS.event_types]
-    if FLAGS['id'].present:
-      request.id = FLAGS.id.decode('utf8')
-    if FLAGS['kind'].present:
-      request.kind = FLAGS.kind.decode('utf8')
-    if FLAGS['object_metadata_format'].present:
-      request.object_metadata_format = FLAGS.object_metadata_format.decode('utf8')
-    if FLAGS['object_name_prefix'].present:
-      request.object_name_prefix = FLAGS.object_name_prefix.decode('utf8')
-    if FLAGS['payload_content'].present:
-      request.payload_content = FLAGS.payload_content.decode('utf8')
-    if FLAGS['selfLink'].present:
-      request.selfLink = FLAGS.selfLink.decode('utf8')
-    if FLAGS['topic'].present:
-      request.topic = FLAGS.topic.decode('utf8')
-    result = client.notifications.Insert(
-        request, global_params=global_params)
-    print apitools_base_cli.FormatOutput(result)
-
-
-class NotificationsList(apitools_base_cli.NewCmd):
-  """Command wrapping notifications.List."""
-
-  usage = """notifications_list <bucket>"""
-
-  def __init__(self, name, fv):
-    super(NotificationsList, self).__init__(name, fv)
-
-  def RunWithArgs(self, bucket):
-    """Retrieves a list of notification subscriptions for a given bucket.
-
-    Args:
-      bucket: Name of a GCS bucket.
-    """
-    client = GetClientFromFlags()
-    global_params = GetGlobalParamsFromFlags()
-    request = messages.StorageNotificationsListRequest(
-        bucket=bucket.decode('utf8'),
-        )
-    result = client.notifications.List(
-        request, global_params=global_params)
-    print apitools_base_cli.FormatOutput(result)
-
-
-class ObjectAccessControlsDelete(apitools_base_cli.NewCmd):
-  """Command wrapping objectAccessControls.Delete."""
-
-  usage = """objectAccessControls_delete <bucket> <object> <entity>"""
-
-  def __init__(self, name, fv):
-    super(ObjectAccessControlsDelete, self).__init__(name, fv)
-    flags.DEFINE_string(
-        'generation',
-        None,
-        u'If present, selects a specific revision of this object (as opposed '
-        u'to the latest version, the default).',
-        flag_values=fv)
-
-  def RunWithArgs(self, bucket, object, entity):
-    """Permanently deletes the ACL entry for the specified entity on the
-    specified object.
-
-    Args:
-      bucket: Name of a bucket.
-      object: Name of the object. For information about how to URL encode
-        object names to be path safe, see Encoding URI Path Parts.
-      entity: The entity holding the permission. Can be user-userId, user-
-        emailAddress, group-groupId, group-emailAddress, allUsers, or
-        allAuthenticatedUsers.
-
-    Flags:
-      generation: If present, selects a specific revision of this object (as
-        opposed to the latest version, the default).
-    """
-    client = GetClientFromFlags()
-    global_params = GetGlobalParamsFromFlags()
-    request = messages.StorageObjectAccessControlsDeleteRequest(
-        bucket=bucket.decode('utf8'),
-        object=object.decode('utf8'),
-        entity=entity.decode('utf8'),
-        )
-    if FLAGS['generation'].present:
-      request.generation = int(FLAGS.generation)
-    result = client.objectAccessControls.Delete(
-        request, global_params=global_params)
-    print apitools_base_cli.FormatOutput(result)
-
-
-class ObjectAccessControlsGet(apitools_base_cli.NewCmd):
-  """Command wrapping objectAccessControls.Get."""
-
-  usage = """objectAccessControls_get <bucket> <object> <entity>"""
-
-  def __init__(self, name, fv):
-    super(ObjectAccessControlsGet, self).__init__(name, fv)
-    flags.DEFINE_string(
-        'generation',
-        None,
-        u'If present, selects a specific revision of this object (as opposed '
-        u'to the latest version, the default).',
-        flag_values=fv)
-
-  def RunWithArgs(self, bucket, object, entity):
-    """Returns the ACL entry for the specified entity on the specified object.
-
-    Args:
-      bucket: Name of a bucket.
-      object: Name of the object. For information about how to URL encode
-        object names to be path safe, see Encoding URI Path Parts.
-      entity: The entity holding the permission. Can be user-userId, user-
-        emailAddress, group-groupId, group-emailAddress, allUsers, or
-        allAuthenticatedUsers.
-
-    Flags:
-      generation: If present, selects a specific revision of this object (as
-        opposed to the latest version, the default).
-    """
-    client = GetClientFromFlags()
-    global_params = GetGlobalParamsFromFlags()
-    request = messages.StorageObjectAccessControlsGetRequest(
-        bucket=bucket.decode('utf8'),
-        object=object.decode('utf8'),
-        entity=entity.decode('utf8'),
-        )
-    if FLAGS['generation'].present:
-      request.generation = int(FLAGS.generation)
-    result = client.objectAccessControls.Get(
-        request, global_params=global_params)
-    print apitools_base_cli.FormatOutput(result)
-
-
-class ObjectAccessControlsInsert(apitools_base_cli.NewCmd):
-  """Command wrapping objectAccessControls.Insert."""
-
-  usage = """objectAccessControls_insert <bucket> <object>"""
-
-  def __init__(self, name, fv):
-    super(ObjectAccessControlsInsert, self).__init__(name, fv)
-    flags.DEFINE_string(
-        'generation',
-        None,
-        u'If present, selects a specific revision of this object (as opposed '
-        u'to the latest version, the default).',
-        flag_values=fv)
-    flags.DEFINE_string(
-        'objectAccessControl',
-        None,
-        u'A ObjectAccessControl resource to be passed as the request body.',
-        flag_values=fv)
-
-  def RunWithArgs(self, bucket, object):
-    """Creates a new ACL entry on the specified object.
-
-    Args:
-      bucket: Name of a bucket.
-      object: Name of the object. For information about how to URL encode
-        object names to be path safe, see Encoding URI Path Parts.
-
-    Flags:
-      generation: If present, selects a specific revision of this object (as
-        opposed to the latest version, the default).
-      objectAccessControl: A ObjectAccessControl resource to be passed as the
-        request body.
-    """
-    client = GetClientFromFlags()
-    global_params = GetGlobalParamsFromFlags()
-    request = messages.StorageObjectAccessControlsInsertRequest(
-        bucket=bucket.decode('utf8'),
-        object=object.decode('utf8'),
-        )
-    if FLAGS['generation'].present:
-      request.generation = int(FLAGS.generation)
-    if FLAGS['objectAccessControl'].present:
-      request.objectAccessControl = apitools_base.JsonToMessage(messages.ObjectAccessControl, FLAGS.objectAccessControl)
-    result = client.objectAccessControls.Insert(
-        request, global_params=global_params)
-    print apitools_base_cli.FormatOutput(result)
-
-
-class ObjectAccessControlsList(apitools_base_cli.NewCmd):
-  """Command wrapping objectAccessControls.List."""
-
-  usage = """objectAccessControls_list <bucket> <object>"""
-
-  def __init__(self, name, fv):
-    super(ObjectAccessControlsList, self).__init__(name, fv)
-    flags.DEFINE_string(
-        'generation',
-        None,
-        u'If present, selects a specific revision of this object (as opposed '
-        u'to the latest version, the default).',
-        flag_values=fv)
-
-  def RunWithArgs(self, bucket, object):
-    """Retrieves ACL entries on the specified object.
-
-    Args:
-      bucket: Name of a bucket.
-      object: Name of the object. For information about how to URL encode
-        object names to be path safe, see Encoding URI Path Parts.
-
-    Flags:
-      generation: If present, selects a specific revision of this object (as
-        opposed to the latest version, the default).
-    """
-    client = GetClientFromFlags()
-    global_params = GetGlobalParamsFromFlags()
-    request = messages.StorageObjectAccessControlsListRequest(
-        bucket=bucket.decode('utf8'),
-        object=object.decode('utf8'),
-        )
-    if FLAGS['generation'].present:
-      request.generation = int(FLAGS.generation)
-    result = client.objectAccessControls.List(
-        request, global_params=global_params)
-    print apitools_base_cli.FormatOutput(result)
-
-
-class ObjectAccessControlsPatch(apitools_base_cli.NewCmd):
-  """Command wrapping objectAccessControls.Patch."""
-
-  usage = """objectAccessControls_patch <bucket> <object> <entity>"""
-
-  def __init__(self, name, fv):
-    super(ObjectAccessControlsPatch, self).__init__(name, fv)
-    flags.DEFINE_string(
-        'generation',
-        None,
-        u'If present, selects a specific revision of this object (as opposed '
-        u'to the latest version, the default).',
-        flag_values=fv)
-    flags.DEFINE_string(
-        'objectAccessControl',
-        None,
-        u'A ObjectAccessControl resource to be passed as the request body.',
-        flag_values=fv)
-
-  def RunWithArgs(self, bucket, object, entity):
-    """Updates an ACL entry on the specified object. This method supports
-    patch semantics.
-
-    Args:
-      bucket: Name of a bucket.
-      object: Name of the object. For information about how to URL encode
-        object names to be path safe, see Encoding URI Path Parts.
-      entity: The entity holding the permission. Can be user-userId, user-
-        emailAddress, group-groupId, group-emailAddress, allUsers, or
-        allAuthenticatedUsers.
-
-    Flags:
-      generation: If present, selects a specific revision of this object (as
-        opposed to the latest version, the default).
-      objectAccessControl: A ObjectAccessControl resource to be passed as the
-        request body.
-    """
-    client = GetClientFromFlags()
-    global_params = GetGlobalParamsFromFlags()
-    request = messages.StorageObjectAccessControlsPatchRequest(
-        bucket=bucket.decode('utf8'),
-        object=object.decode('utf8'),
-        entity=entity.decode('utf8'),
-        )
-    if FLAGS['generation'].present:
-      request.generation = int(FLAGS.generation)
-    if FLAGS['objectAccessControl'].present:
-      request.objectAccessControl = apitools_base.JsonToMessage(messages.ObjectAccessControl, FLAGS.objectAccessControl)
-    result = client.objectAccessControls.Patch(
-        request, global_params=global_params)
-    print apitools_base_cli.FormatOutput(result)
-
-
-class ObjectAccessControlsUpdate(apitools_base_cli.NewCmd):
-  """Command wrapping objectAccessControls.Update."""
-
-  usage = """objectAccessControls_update <bucket> <object> <entity>"""
-
-  def __init__(self, name, fv):
-    super(ObjectAccessControlsUpdate, self).__init__(name, fv)
-    flags.DEFINE_string(
-        'generation',
-        None,
-        u'If present, selects a specific revision of this object (as opposed '
-        u'to the latest version, the default).',
-        flag_values=fv)
-    flags.DEFINE_string(
-        'objectAccessControl',
-        None,
-        u'A ObjectAccessControl resource to be passed as the request body.',
-        flag_values=fv)
-
-  def RunWithArgs(self, bucket, object, entity):
-    """Updates an ACL entry on the specified object.
-
-    Args:
-      bucket: Name of a bucket.
-      object: Name of the object. For information about how to URL encode
-        object names to be path safe, see Encoding URI Path Parts.
-      entity: The entity holding the permission. Can be user-userId, user-
-        emailAddress, group-groupId, group-emailAddress, allUsers, or
-        allAuthenticatedUsers.
-
-    Flags:
-      generation: If present, selects a specific revision of this object (as
-        opposed to the latest version, the default).
-      objectAccessControl: A ObjectAccessControl resource to be passed as the
-        request body.
-    """
-    client = GetClientFromFlags()
-    global_params = GetGlobalParamsFromFlags()
-    request = messages.StorageObjectAccessControlsUpdateRequest(
-        bucket=bucket.decode('utf8'),
-        object=object.decode('utf8'),
-        entity=entity.decode('utf8'),
-        )
-    if FLAGS['generation'].present:
-      request.generation = int(FLAGS.generation)
-    if FLAGS['objectAccessControl'].present:
-      request.objectAccessControl = apitools_base.JsonToMessage(messages.ObjectAccessControl, FLAGS.objectAccessControl)
-    result = client.objectAccessControls.Update(
-        request, global_params=global_params)
-    print apitools_base_cli.FormatOutput(result)
-
-
-class ObjectsCompose(apitools_base_cli.NewCmd):
-  """Command wrapping objects.Compose."""
-
-  usage = """objects_compose <destinationBucket> <destinationObject>"""
-
-  def __init__(self, name, fv):
-    super(ObjectsCompose, self).__init__(name, fv)
-    flags.DEFINE_string(
-        'composeRequest',
-        None,
-        u'A ComposeRequest resource to be passed as the request body.',
-        flag_values=fv)
-    flags.DEFINE_enum(
-        'destinationPredefinedAcl',
-        u'authenticatedRead',
-        [u'authenticatedRead', u'bucketOwnerFullControl', u'bucketOwnerRead', u'private', u'projectPrivate', u'publicRead'],
-        u'Apply a predefined set of access controls to the destination '
-        u'object.',
-        flag_values=fv)
-    flags.DEFINE_string(
-        'ifGenerationMatch',
-        None,
-        u"Makes the operation conditional on whether the object's current "
-        u'generation matches the given value.',
-        flag_values=fv)
-    flags.DEFINE_string(
-        'ifMetagenerationMatch',
-        None,
-        u"Makes the operation conditional on whether the object's current "
-        u'metageneration matches the given value.',
-        flag_values=fv)
-    flags.DEFINE_string(
-        'download_filename',
-        '',
-        'Filename to use for download.',
-        flag_values=fv)
-    flags.DEFINE_boolean(
-        'overwrite',
-        'False',
-        'If True, overwrite the existing file when downloading.',
-        flag_values=fv)
-
-  def RunWithArgs(self, destinationBucket, destinationObject):
-    """Concatenates a list of existing objects into a new object in the same
-    bucket.
-
-    Args:
-      destinationBucket: Name of the bucket in which to store the new object.
-      destinationObject: Name of the new object. For information about how to
-        URL encode object names to be path safe, see Encoding URI Path Parts.
-
-    Flags:
-      composeRequest: A ComposeRequest resource to be passed as the request
-        body.
-      destinationPredefinedAcl: Apply a predefined set of access controls to
-        the destination object.
-      ifGenerationMatch: Makes the operation conditional on whether the
-        object's current generation matches the given value.
-      ifMetagenerationMatch: Makes the operation conditional on whether the
-        object's current metageneration matches the given value.
-      download_filename: Filename to use for download.
-      overwrite: If True, overwrite the existing file when downloading.
-    """
-    client = GetClientFromFlags()
-    global_params = GetGlobalParamsFromFlags()
-    request = messages.StorageObjectsComposeRequest(
-        destinationBucket=destinationBucket.decode('utf8'),
-        destinationObject=destinationObject.decode('utf8'),
-        )
-    if FLAGS['composeRequest'].present:
-      request.composeRequest = apitools_base.JsonToMessage(messages.ComposeRequest, FLAGS.composeRequest)
-    if FLAGS['destinationPredefinedAcl'].present:
-      request.destinationPredefinedAcl = messages.StorageObjectsComposeRequest.DestinationPredefinedAclValueValuesEnum(FLAGS.destinationPredefinedAcl)
-    if FLAGS['ifGenerationMatch'].present:
-      request.ifGenerationMatch = int(FLAGS.ifGenerationMatch)
-    if FLAGS['ifMetagenerationMatch'].present:
-      request.ifMetagenerationMatch = int(FLAGS.ifMetagenerationMatch)
-    download = None
-    if FLAGS.download_filename:
-      download = apitools_base.Download.FromFile(FLAGS.download_filename, overwrite=FLAGS.overwrite,
-          progress_callback=apitools_base.DownloadProgressPrinter,
-          finish_callback=apitools_base.DownloadCompletePrinter)
-    result = client.objects.Compose(
-        request, global_params=global_params, download=download)
-    print apitools_base_cli.FormatOutput(result)
-
-
-class ObjectsCopy(apitools_base_cli.NewCmd):
-  """Command wrapping objects.Copy."""
-
-  usage = """objects_copy <sourceBucket> <sourceObject> <destinationBucket> <destinationObject>"""
-
-  def __init__(self, name, fv):
-    super(ObjectsCopy, self).__init__(name, fv)
-    flags.DEFINE_enum(
-        'destinationPredefinedAcl',
-        u'authenticatedRead',
-        [u'authenticatedRead', u'bucketOwnerFullControl', u'bucketOwnerRead', u'private', u'projectPrivate', u'publicRead'],
-        u'Apply a predefined set of access controls to the destination '
-        u'object.',
-        flag_values=fv)
-    flags.DEFINE_string(
-        'ifGenerationMatch',
-        None,
-        u"Makes the operation conditional on whether the destination object's"
-        u' current generation matches the given value.',
-        flag_values=fv)
-    flags.DEFINE_string(
-        'ifGenerationNotMatch',
-        None,
-        u"Makes the operation conditional on whether the destination object's"
-        u' current generation does not match the given value.',
-        flag_values=fv)
-    flags.DEFINE_string(
-        'ifMetagenerationMatch',
-        None,
-        u"Makes the operation conditional on whether the destination object's"
-        u' current metageneration matches the given value.',
-        flag_values=fv)
-    flags.DEFINE_string(
-        'ifMetagenerationNotMatch',
-        None,
-        u"Makes the operation conditional on whether the destination object's"
-        u' current metageneration does not match the given value.',
-        flag_values=fv)
-    flags.DEFINE_string(
-        'ifSourceGenerationMatch',
-        None,
-        u"Makes the operation conditional on whether the source object's "
-        u'generation matches the given value.',
-        flag_values=fv)
-    flags.DEFINE_string(
-        'ifSourceGenerationNotMatch',
-        None,
-        u"Makes the operation conditional on whether the source object's "
-        u'generation does not match the given value.',
-        flag_values=fv)
-    flags.DEFINE_string(
-        'ifSourceMetagenerationMatch',
-        None,
-        u"Makes the operation conditional on whether the source object's "
-        u'current metageneration matches the given value.',
-        flag_values=fv)
-    flags.DEFINE_string(
-        'ifSourceMetagenerationNotMatch',
-        None,
-        u"Makes the operation conditional on whether the source object's "
-        u'current metageneration does not match the given value.',
-        flag_values=fv)
-    flags.DEFINE_string(
-        'object',
-        None,
-        u'A Object resource to be passed as the request body.',
-        flag_values=fv)
-    flags.DEFINE_enum(
-        'projection',
-        u'full',
-        [u'full', u'noAcl'],
-        u'Set of properties to return. Defaults to noAcl, unless the object '
-        u'resource specifies the acl property, when it defaults to full.',
-        flag_values=fv)
-    flags.DEFINE_string(
-        'sourceGeneration',
-        None,
-        u'If present, selects a specific revision of the source object (as '
-        u'opposed to the latest version, the default).',
-        flag_values=fv)
-    flags.DEFINE_string(
-        'download_filename',
-        '',
-        'Filename to use for download.',
-        flag_values=fv)
-    flags.DEFINE_boolean(
-        'overwrite',
-        'False',
-        'If True, overwrite the existing file when downloading.',
-        flag_values=fv)
-
-  def RunWithArgs(self, sourceBucket, sourceObject, destinationBucket, destinationObject):
-    """Copies a source object to a destination object. Optionally overrides
-    metadata.
-
-    Args:
-      sourceBucket: Name of the bucket in which to find the source object.
-      sourceObject: Name of the source object. For information about how to
-        URL encode object names to be path safe, see Encoding URI Path Parts.
-      destinationBucket: Name of the bucket in which to store the new object.
-        Overrides the provided object metadata's bucket value, if any.For
-        information about how to URL encode object names to be path safe, see
-        Encoding URI Path Parts.
-      destinationObject: Name of the new object. Required when the object
-        metadata is not otherwise provided. Overrides the object metadata's
-        name value, if any.
-
-    Flags:
-      destinationPredefinedAcl: Apply a predefined set of access controls to
-        the destination object.
-      ifGenerationMatch: Makes the operation conditional on whether the
-        destination object's current generation matches the given value.
-      ifGenerationNotMatch: Makes the operation conditional on whether the
-        destination object's current generation does not match the given
-        value.
-      ifMetagenerationMatch: Makes the operation conditional on whether the
-        destination object's current metageneration matches the given value.
-      ifMetagenerationNotMatch: Makes the operation conditional on whether the
-        destination object's current metageneration does not match the given
-        value.
-      ifSourceGenerationMatch: Makes the operation conditional on whether the
-        source object's generation matches the given value.
-      ifSourceGenerationNotMatch: Makes the operation conditional on whether
-        the source object's generation does not match the given value.
-      ifSourceMetagenerationMatch: Makes the operation conditional on whether
-        the source object's current metageneration matches the given value.
-      ifSourceMetagenerationNotMatch: Makes the operation conditional on
-        whether the source object's current metageneration does not match the
-        given value.
-      object: A Object resource to be passed as the request body.
-      projection: Set of properties to return. Defaults to noAcl, unless the
-        object resource specifies the acl property, when it defaults to full.
-      sourceGeneration: If present, selects a specific revision of the source
-        object (as opposed to the latest version, the default).
-      download_filename: Filename to use for download.
-      overwrite: If True, overwrite the existing file when downloading.
-    """
-    client = GetClientFromFlags()
-    global_params = GetGlobalParamsFromFlags()
-    request = messages.StorageObjectsCopyRequest(
-        sourceBucket=sourceBucket.decode('utf8'),
-        sourceObject=sourceObject.decode('utf8'),
-        destinationBucket=destinationBucket.decode('utf8'),
-        destinationObject=destinationObject.decode('utf8'),
-        )
-    if FLAGS['destinationPredefinedAcl'].present:
-      request.destinationPredefinedAcl = messages.StorageObjectsCopyRequest.DestinationPredefinedAclValueValuesEnum(FLAGS.destinationPredefinedAcl)
-    if FLAGS['ifGenerationMatch'].present:
-      request.ifGenerationMatch = int(FLAGS.ifGenerationMatch)
-    if FLAGS['ifGenerationNotMatch'].present:
-      request.ifGenerationNotMatch = int(FLAGS.ifGenerationNotMatch)
-    if FLAGS['ifMetagenerationMatch'].present:
-      request.ifMetagenerationMatch = int(FLAGS.ifMetagenerationMatch)
-    if FLAGS['ifMetagenerationNotMatch'].present:
-      request.ifMetagenerationNotMatch = int(FLAGS.ifMetagenerationNotMatch)
-    if FLAGS['ifSourceGenerationMatch'].present:
-      request.ifSourceGenerationMatch = int(FLAGS.ifSourceGenerationMatch)
-    if FLAGS['ifSourceGenerationNotMatch'].present:
-      request.ifSourceGenerationNotMatch = int(FLAGS.ifSourceGenerationNotMatch)
-    if FLAGS['ifSourceMetagenerationMatch'].present:
-      request.ifSourceMetagenerationMatch = int(FLAGS.ifSourceMetagenerationMatch)
-    if FLAGS['ifSourceMetagenerationNotMatch'].present:
-      request.ifSourceMetagenerationNotMatch = int(FLAGS.ifSourceMetagenerationNotMatch)
-    if FLAGS['object'].present:
-      request.object = apitools_base.JsonToMessage(messages.Object, FLAGS.object)
-    if FLAGS['projection'].present:
-      request.projection = messages.StorageObjectsCopyRequest.ProjectionValueValuesEnum(FLAGS.projection)
-    if FLAGS['sourceGeneration'].present:
-      request.sourceGeneration = int(FLAGS.sourceGeneration)
-    download = None
-    if FLAGS.download_filename:
-      download = apitools_base.Download.FromFile(FLAGS.download_filename, overwrite=FLAGS.overwrite,
-          progress_callback=apitools_base.DownloadProgressPrinter,
-          finish_callback=apitools_base.DownloadCompletePrinter)
-    result = client.objects.Copy(
-        request, global_params=global_params, download=download)
-    print apitools_base_cli.FormatOutput(result)
-
-
-class ObjectsDelete(apitools_base_cli.NewCmd):
-  """Command wrapping objects.Delete."""
-
-  usage = """objects_delete <bucket> <object>"""
-
-  def __init__(self, name, fv):
-    super(ObjectsDelete, self).__init__(name, fv)
-    flags.DEFINE_string(
-        'generation',
-        None,
-        u'If present, permanently deletes a specific revision of this object '
-        u'(as opposed to the latest version, the default).',
-        flag_values=fv)
-    flags.DEFINE_string(
-        'ifGenerationMatch',
-        None,
-        u"Makes the operation conditional on whether the object's current "
-        u'generation matches the given value.',
-        flag_values=fv)
-    flags.DEFINE_string(
-        'ifGenerationNotMatch',
-        None,
-        u"Makes the operation conditional on whether the object's current "
-        u'generation does not match the given value.',
-        flag_values=fv)
-    flags.DEFINE_string(
-        'ifMetagenerationMatch',
-        None,
-        u"Makes the operation conditional on whether the object's current "
-        u'metageneration matches the given value.',
-        flag_values=fv)
-    flags.DEFINE_string(
-        'ifMetagenerationNotMatch',
-        None,
-        u"Makes the operation conditional on whether the object's current "
-        u'metageneration does not match the given value.',
-        flag_values=fv)
-
-  def RunWithArgs(self, bucket, object):
-    """Deletes an object and its metadata. Deletions are permanent if
-    versioning is not enabled for the bucket, or if the generation parameter
-    is used.
-
-    Args:
-      bucket: Name of the bucket in which the object resides.
-      object: Name of the object. For information about how to URL encode
-        object names to be path safe, see Encoding URI Path Parts.
-
-    Flags:
-      generation: If present, permanently deletes a specific revision of this
-        object (as opposed to the latest version, the default).
-      ifGenerationMatch: Makes the operation conditional on whether the
-        object's current generation matches the given value.
-      ifGenerationNotMatch: Makes the operation conditional on whether the
-        object's current generation does not match the given value.
-      ifMetagenerationMatch: Makes the operation conditional on whether the
-        object's current metageneration matches the given value.
-      ifMetagenerationNotMatch: Makes the operation conditional on whether the
-        object's current metageneration does not match the given value.
-    """
-    client = GetClientFromFlags()
-    global_params = GetGlobalParamsFromFlags()
-    request = messages.StorageObjectsDeleteRequest(
-        bucket=bucket.decode('utf8'),
-        object=object.decode('utf8'),
-        )
-    if FLAGS['generation'].present:
-      request.generation = int(FLAGS.generation)
-    if FLAGS['ifGenerationMatch'].present:
-      request.ifGenerationMatch = int(FLAGS.ifGenerationMatch)
-    if FLAGS['ifGenerationNotMatch'].present:
-      request.ifGenerationNotMatch = int(FLAGS.ifGenerationNotMatch)
-    if FLAGS['ifMetagenerationMatch'].present:
-      request.ifMetagenerationMatch = int(FLAGS.ifMetagenerationMatch)
-    if FLAGS['ifMetagenerationNotMatch'].present:
-      request.ifMetagenerationNotMatch = int(FLAGS.ifMetagenerationNotMatch)
-    result = client.objects.Delete(
-        request, global_params=global_params)
-    print apitools_base_cli.FormatOutput(result)
-
-
-class ObjectsGet(apitools_base_cli.NewCmd):
-  """Command wrapping objects.Get."""
-
-  usage = """objects_get <bucket> <object>"""
-
-  def __init__(self, name, fv):
-    super(ObjectsGet, self).__init__(name, fv)
-    flags.DEFINE_string(
-        'generation',
-        None,
-        u'If present, selects a specific revision of this object (as opposed '
-        u'to the latest version, the default).',
-        flag_values=fv)
-    flags.DEFINE_string(
-        'ifGenerationMatch',
-        None,
-        u"Makes the operation conditional on whether the object's generation "
-        u'matches the given value.',
-        flag_values=fv)
-    flags.DEFINE_string(
-        'ifGenerationNotMatch',
-        None,
-        u"Makes the operation conditional on whether the object's generation "
-        u'does not match the given value.',
-        flag_values=fv)
-    flags.DEFINE_string(
-        'ifMetagenerationMatch',
-        None,
-        u"Makes the operation conditional on whether the object's current "
-        u'metageneration matches the given value.',
-        flag_values=fv)
-    flags.DEFINE_string(
-        'ifMetagenerationNotMatch',
-        None,
-        u"Makes the operation conditional on whether the object's current "
-        u'metageneration does not match the given value.',
-        flag_values=fv)
-    flags.DEFINE_enum(
-        'projection',
-        u'full',
-        [u'full', u'noAcl'],
-        u'Set of properties to return. Defaults to noAcl.',
-        flag_values=fv)
-    flags.DEFINE_string(
-        'download_filename',
-        '',
-        'Filename to use for download.',
-        flag_values=fv)
-    flags.DEFINE_boolean(
-        'overwrite',
-        'False',
-        'If True, overwrite the existing file when downloading.',
-        flag_values=fv)
-
-  def RunWithArgs(self, bucket, object):
-    """Retrieves an object or its metadata.
-
-    Args:
-      bucket: Name of the bucket in which the object resides.
-      object: Name of the object. For information about how to URL encode
-        object names to be path safe, see Encoding URI Path Parts.
-
-    Flags:
-      generation: If present, selects a specific revision of this object (as
-        opposed to the latest version, the default).
-      ifGenerationMatch: Makes the operation conditional on whether the
-        object's generation matches the given value.
-      ifGenerationNotMatch: Makes the operation conditional on whether the
-        object's generation does not match the given value.
-      ifMetagenerationMatch: Makes the operation conditional on whether the
-        object's current metageneration matches the given value.
-      ifMetagenerationNotMatch: Makes the operation conditional on whether the
-        object's current metageneration does not match the given value.
-      projection: Set of properties to return. Defaults to noAcl.
-      download_filename: Filename to use for download.
-      overwrite: If True, overwrite the existing file when downloading.
-    """
-    client = GetClientFromFlags()
-    global_params = GetGlobalParamsFromFlags()
-    request = messages.StorageObjectsGetRequest(
-        bucket=bucket.decode('utf8'),
-        object=object.decode('utf8'),
-        )
-    if FLAGS['generation'].present:
-      request.generation = int(FLAGS.generation)
-    if FLAGS['ifGenerationMatch'].present:
-      request.ifGenerationMatch = int(FLAGS.ifGenerationMatch)
-    if FLAGS['ifGenerationNotMatch'].present:
-      request.ifGenerationNotMatch = int(FLAGS.ifGenerationNotMatch)
-    if FLAGS['ifMetagenerationMatch'].present:
-      request.ifMetagenerationMatch = int(FLAGS.ifMetagenerationMatch)
-    if FLAGS['ifMetagenerationNotMatch'].present:
-      request.ifMetagenerationNotMatch = int(FLAGS.ifMetagenerationNotMatch)
-    if FLAGS['projection'].present:
-      request.projection = messages.StorageObjectsGetRequest.ProjectionValueValuesEnum(FLAGS.projection)
-    download = None
-    if FLAGS.download_filename:
-      download = apitools_base.Download.FromFile(FLAGS.download_filename, overwrite=FLAGS.overwrite,
-          progress_callback=apitools_base.DownloadProgressPrinter,
-          finish_callback=apitools_base.DownloadCompletePrinter)
-    result = client.objects.Get(
-        request, global_params=global_params, download=download)
-    print apitools_base_cli.FormatOutput(result)
-
-
-class ObjectsGetIamPolicy(apitools_base_cli.NewCmd):
-  """Command wrapping objects.GetIamPolicy."""
-
-  usage = """objects_getIamPolicy <bucket> <object>"""
-
-  def __init__(self, name, fv):
-    super(ObjectsGetIamPolicy, self).__init__(name, fv)
-    flags.DEFINE_string(
-        'generation',
-        None,
-        u'If present, selects a specific revision of this object (as opposed '
-        u'to the latest version, the default).',
-        flag_values=fv)
-
-  def RunWithArgs(self, bucket, object):
-    """Returns an IAM policy for the specified object.
-
-    Args:
-      bucket: Name of the bucket in which the object resides.
-      object: Name of the object. For information about how to URL encode
-        object names to be path safe, see Encoding URI Path Parts.
-
-    Flags:
-      generation: If present, selects a specific revision of this object (as
-        opposed to the latest version, the default).
-    """
-    client = GetClientFromFlags()
-    global_params = GetGlobalParamsFromFlags()
-    request = messages.StorageObjectsGetIamPolicyRequest(
-        bucket=bucket.decode('utf8'),
-        object=object.decode('utf8'),
-        )
-    if FLAGS['generation'].present:
-      request.generation = int(FLAGS.generation)
-    result = client.objects.GetIamPolicy(
-        request, global_params=global_params)
-    print apitools_base_cli.FormatOutput(result)
-
-
-class ObjectsInsert(apitools_base_cli.NewCmd):
-  """Command wrapping objects.Insert."""
-
-  usage = """objects_insert <bucket>"""
-
-  def __init__(self, name, fv):
-    super(ObjectsInsert, self).__init__(name, fv)
-    flags.DEFINE_string(
-        'contentEncoding',
-        None,
-        u'If set, sets the contentEncoding property of the final object to '
-        u'this value. Setting this parameter is equivalent to setting the '
-        u'contentEncoding metadata property. This can be useful when '
-        u'uploading an object with uploadType=media to indicate the encoding '
-        u'of the content being uploaded.',
-        flag_values=fv)
-    flags.DEFINE_string(
-        'ifGenerationMatch',
-        None,
-        u"Makes the operation conditional on whether the object's current "
-        u'generation matches the given value.',
-        flag_values=fv)
-    flags.DEFINE_string(
-        'ifGenerationNotMatch',
-        None,
-        u"Makes the operation conditional on whether the object's current "
-        u'generation does not match the given value.',
-        flag_values=fv)
-    flags.DEFINE_string(
-        'ifMetagenerationMatch',
-        None,
-        u"Makes the operation conditional on whether the object's current "
-        u'metageneration matches the given value.',
-        flag_values=fv)
-    flags.DEFINE_string(
-        'ifMetagenerationNotMatch',
-        None,
-        u"Makes the operation conditional on whether the object's current "
-        u'metageneration does not match the given value.',
-        flag_values=fv)
-    flags.DEFINE_string(
-        'name',
-        None,
-        u'Name of the object. Required when the object metadata is not '
-        u"otherwise provided. Overrides the object metadata's name value, if "
-        u'any. For information about how to URL encode object names to be '
-        u'path safe, see Encoding URI Path Parts.',
-        flag_values=fv)
-    flags.DEFINE_string(
-        'object',
-        None,
-        u'A Object resource to be passed as the request body.',
-        flag_values=fv)
-    flags.DEFINE_enum(
-        'predefinedAcl',
-        u'authenticatedRead',
-        [u'authenticatedRead', u'bucketOwnerFullControl', u'bucketOwnerRead', u'private', u'projectPrivate', u'publicRead'],
-        u'Apply a predefined set of access controls to this object.',
-        flag_values=fv)
-    flags.DEFINE_enum(
-        'projection',
-        u'full',
-        [u'full', u'noAcl'],
-        u'Set of properties to return. Defaults to noAcl, unless the object '
-        u'resource specifies the acl property, when it defaults to full.',
-        flag_values=fv)
-    flags.DEFINE_string(
-        'upload_filename',
-        '',
-        'Filename to use for upload.',
-        flag_values=fv)
-    flags.DEFINE_string(
-        'upload_mime_type',
-        '',
-        'MIME type to use for the upload. Only needed if the extension on '
-        '--upload_filename does not determine the correct (or any) MIME '
-        'type.',
-        flag_values=fv)
-    flags.DEFINE_string(
-        'download_filename',
-        '',
-        'Filename to use for download.',
-        flag_values=fv)
-    flags.DEFINE_boolean(
-        'overwrite',
-        'False',
-        'If True, overwrite the existing file when downloading.',
-        flag_values=fv)
-
-  def RunWithArgs(self, bucket):
-    """Stores a new object and metadata.
-
-    Args:
-      bucket: Name of the bucket in which to store the new object. Overrides
-        the provided object metadata's bucket value, if any.
-
-    Flags:
-      contentEncoding: If set, sets the contentEncoding property of the final
-        object to this value. Setting this parameter is equivalent to setting
-        the contentEncoding metadata property. This can be useful when
-        uploading an object with uploadType=media to indicate the encoding of
-        the content being uploaded.
-      ifGenerationMatch: Makes the operation conditional on whether the
-        object's current generation matches the given value.
-      ifGenerationNotMatch: Makes the operation conditional on whether the
-        object's current generation does not match the given value.
-      ifMetagenerationMatch: Makes the operation conditional on whether the
-        object's current metageneration matches the given value.
-      ifMetagenerationNotMatch: Makes the operation conditional on whether the
-        object's current metageneration does not match the given value.
-      name: Name of the object. Required when the object metadata is not
-        otherwise provided. Overrides the object metadata's name value, if
-        any. For information about how to URL encode object names to be path
-        safe, see Encoding URI Path Parts.
-      object: A Object resource to be passed as the request body.
-      predefinedAcl: Apply a predefined set of access controls to this object.
-      projection: Set of properties to return. Defaults to noAcl, unless the
-        object resource specifies the acl property, when it defaults to full.
-      upload_filename: Filename to use for upload.
-      upload_mime_type: MIME type to use for the upload. Only needed if the
-        extension on --upload_filename does not determine the correct (or any)
-        MIME type.
-      download_filename: Filename to use for download.
-      overwrite: If True, overwrite the existing file when downloading.
-    """
-    client = GetClientFromFlags()
-    global_params = GetGlobalParamsFromFlags()
-    request = messages.StorageObjectsInsertRequest(
-        bucket=bucket.decode('utf8'),
-        )
-    if FLAGS['contentEncoding'].present:
-      request.contentEncoding = FLAGS.contentEncoding.decode('utf8')
-    if FLAGS['ifGenerationMatch'].present:
-      request.ifGenerationMatch = int(FLAGS.ifGenerationMatch)
-    if FLAGS['ifGenerationNotMatch'].present:
-      request.ifGenerationNotMatch = int(FLAGS.ifGenerationNotMatch)
-    if FLAGS['ifMetagenerationMatch'].present:
-      request.ifMetagenerationMatch = int(FLAGS.ifMetagenerationMatch)
-    if FLAGS['ifMetagenerationNotMatch'].present:
-      request.ifMetagenerationNotMatch = int(FLAGS.ifMetagenerationNotMatch)
-    if FLAGS['name'].present:
-      request.name = FLAGS.name.decode('utf8')
-    if FLAGS['object'].present:
-      request.object = apitools_base.JsonToMessage(messages.Object, FLAGS.object)
-    if FLAGS['predefinedAcl'].present:
-      request.predefinedAcl = messages.StorageObjectsInsertRequest.PredefinedAclValueValuesEnum(FLAGS.predefinedAcl)
-    if FLAGS['projection'].present:
-      request.projection = messages.StorageObjectsInsertRequest.ProjectionValueValuesEnum(FLAGS.projection)
-    upload = None
-    if FLAGS.upload_filename:
-      upload = apitools_base.Upload.FromFile(
-          FLAGS.upload_filename, FLAGS.upload_mime_type,
-          progress_callback=apitools_base.UploadProgressPrinter,
-          finish_callback=apitools_base.UploadCompletePrinter)
-    download = None
-    if FLAGS.download_filename:
-      download = apitools_base.Download.FromFile(FLAGS.download_filename, overwrite=FLAGS.overwrite,
-          progress_callback=apitools_base.DownloadProgressPrinter,
-          finish_callback=apitools_base.DownloadCompletePrinter)
-    result = client.objects.Insert(
-        request, global_params=global_params, upload=upload, download=download)
-    print apitools_base_cli.FormatOutput(result)
-
-
-class ObjectsList(apitools_base_cli.NewCmd):
-  """Command wrapping objects.List."""
-
-  usage = """objects_list <bucket>"""
-
-  def __init__(self, name, fv):
-    super(ObjectsList, self).__init__(name, fv)
-    flags.DEFINE_string(
-        'delimiter',
-        None,
-        u'Returns results in a directory-like mode. items will contain only '
-        u'objects whose names, aside from the prefix, do not contain '
-        u'delimiter. Objects whose names, aside from the prefix, contain '
-        u'delimiter will have their name, truncated after the delimiter, '
-        u'returned in prefixes. Duplicate prefixes are omitted.',
-        flag_values=fv)
-    flags.DEFINE_integer(
-        'maxResults',
-        None,
-        u'Maximum number of items plus prefixes to return. As duplicate '
-        u'prefixes are omitted, fewer total results may be returned than '
-        u'requested. The default value of this parameter is 1,000 items.',
-        flag_values=fv)
-    flags.DEFINE_string(
-        'pageToken',
-        None,
-        u'A previously-returned page token representing part of the larger '
-        u'set of results to view.',
-        flag_values=fv)
-    flags.DEFINE_string(
-        'prefix',
-        None,
-        u'Filter results to objects whose names begin with this prefix.',
-        flag_values=fv)
-    flags.DEFINE_enum(
-        'projection',
-        u'full',
-        [u'full', u'noAcl'],
-        u'Set of properties to return. Defaults to noAcl.',
-        flag_values=fv)
-    flags.DEFINE_boolean(
-        'versions',
-        None,
-        u'If true, lists all versions of an object as distinct results. The '
-        u'default is false. For more information, see Object Versioning.',
-        flag_values=fv)
-
-  def RunWithArgs(self, bucket):
-    """Retrieves a list of objects matching the criteria.
-
-    Args:
-      bucket: Name of the bucket in which to look for objects.
-
-    Flags:
-      delimiter: Returns results in a directory-like mode. items will contain
-        only objects whose names, aside from the prefix, do not contain
-        delimiter. Objects whose names, aside from the prefix, contain
-        delimiter will have their name, truncated after the delimiter,
-        returned in prefixes. Duplicate prefixes are omitted.
-      maxResults: Maximum number of items plus prefixes to return. As
-        duplicate prefixes are omitted, fewer total results may be returned
-        than requested. The default value of this parameter is 1,000 items.
-      pageToken: A previously-returned page token representing part of the
-        larger set of results to view.
-      prefix: Filter results to objects whose names begin with this prefix.
-      projection: Set of properties to return. Defaults to noAcl.
-      versions: If true, lists all versions of an object as distinct results.
-        The default is false. For more information, see Object Versioning.
-    """
-    client = GetClientFromFlags()
-    global_params = GetGlobalParamsFromFlags()
-    request = messages.StorageObjectsListRequest(
-        bucket=bucket.decode('utf8'),
-        )
-    if FLAGS['delimiter'].present:
-      request.delimiter = FLAGS.delimiter.decode('utf8')
-    if FLAGS['maxResults'].present:
-      request.maxResults = FLAGS.maxResults
-    if FLAGS['pageToken'].present:
-      request.pageToken = FLAGS.pageToken.decode('utf8')
-    if FLAGS['prefix'].present:
-      request.prefix = FLAGS.prefix.decode('utf8')
-    if FLAGS['projection'].present:
-      request.projection = messages.StorageObjectsListRequest.ProjectionValueValuesEnum(FLAGS.projection)
-    if FLAGS['versions'].present:
-      request.versions = FLAGS.versions
-    result = client.objects.List(
-        request, global_params=global_params)
-    print apitools_base_cli.FormatOutput(result)
-
-
-class ObjectsPatch(apitools_base_cli.NewCmd):
-  """Command wrapping objects.Patch."""
-
-  usage = """objects_patch <bucket> <object>"""
-
-  def __init__(self, name, fv):
-    super(ObjectsPatch, self).__init__(name, fv)
-    flags.DEFINE_string(
-        'generation',
-        None,
-        u'If present, selects a specific revision of this object (as opposed '
-        u'to the latest version, the default).',
-        flag_values=fv)
-    flags.DEFINE_string(
-        'ifGenerationMatch',
-        None,
-        u"Makes the operation conditional on whether the object's current "
-        u'generation matches the given value.',
-        flag_values=fv)
-    flags.DEFINE_string(
-        'ifGenerationNotMatch',
-        None,
-        u"Makes the operation conditional on whether the object's current "
-        u'generation does not match the given value.',
-        flag_values=fv)
-    flags.DEFINE_string(
-        'ifMetagenerationMatch',
-        None,
-        u"Makes the operation conditional on whether the object's current "
-        u'metageneration matches the given value.',
-        flag_values=fv)
-    flags.DEFINE_string(
-        'ifMetagenerationNotMatch',
-        None,
-        u"Makes the operation conditional on whether the object's current "
-        u'metageneration does not match the given value.',
-        flag_values=fv)
-    flags.DEFINE_string(
-        'objectResource',
-        None,
-        u'A Object resource to be passed as the request body.',
-        flag_values=fv)
-    flags.DEFINE_enum(
-        'predefinedAcl',
-        u'authenticatedRead',
-        [u'authenticatedRead', u'bucketOwnerFullControl', u'bucketOwnerRead', u'private', u'projectPrivate', u'publicRead'],
-        u'Apply a predefined set of access controls to this object.',
-        flag_values=fv)
-    flags.DEFINE_enum(
-        'projection',
-        u'full',
-        [u'full', u'noAcl'],
-        u'Set of properties to return. Defaults to full.',
-        flag_values=fv)
-
-  def RunWithArgs(self, bucket, object):
-    """Updates an object's metadata. This method supports patch semantics.
-
-    Args:
-      bucket: Name of the bucket in which the object resides.
-      object: Name of the object. For information about how to URL encode
-        object names to be path safe, see Encoding URI Path Parts.
-
-    Flags:
-      generation: If present, selects a specific revision of this object (as
-        opposed to the latest version, the default).
-      ifGenerationMatch: Makes the operation conditional on whether the
-        object's current generation matches the given value.
-      ifGenerationNotMatch: Makes the operation conditional on whether the
-        object's current generation does not match the given value.
-      ifMetagenerationMatch: Makes the operation conditional on whether the
-        object's current metageneration matches the given value.
-      ifMetagenerationNotMatch: Makes the operation conditional on whether the
-        object's current metageneration does not match the given value.
-      objectResource: A Object resource to be passed as the request body.
-      predefinedAcl: Apply a predefined set of access controls to this object.
-      projection: Set of properties to return. Defaults to full.
-    """
-    client = GetClientFromFlags()
-    global_params = GetGlobalParamsFromFlags()
-    request = messages.StorageObjectsPatchRequest(
-        bucket=bucket.decode('utf8'),
-        object=object.decode('utf8'),
-        )
-    if FLAGS['generation'].present:
-      request.generation = int(FLAGS.generation)
-    if FLAGS['ifGenerationMatch'].present:
-      request.ifGenerationMatch = int(FLAGS.ifGenerationMatch)
-    if FLAGS['ifGenerationNotMatch'].present:
-      request.ifGenerationNotMatch = int(FLAGS.ifGenerationNotMatch)
-    if FLAGS['ifMetagenerationMatch'].present:
-      request.ifMetagenerationMatch = int(FLAGS.ifMetagenerationMatch)
-    if FLAGS['ifMetagenerationNotMatch'].present:
-      request.ifMetagenerationNotMatch = int(FLAGS.ifMetagenerationNotMatch)
-    if FLAGS['objectResource'].present:
-      request.objectResource = apitools_base.JsonToMessage(messages.Object, FLAGS.objectResource)
-    if FLAGS['predefinedAcl'].present:
-      request.predefinedAcl = messages.StorageObjectsPatchRequest.PredefinedAclValueValuesEnum(FLAGS.predefinedAcl)
-    if FLAGS['projection'].present:
-      request.projection = messages.StorageObjectsPatchRequest.ProjectionValueValuesEnum(FLAGS.projection)
-    result = client.objects.Patch(
-        request, global_params=global_params)
-    print apitools_base_cli.FormatOutput(result)
-
-
-class ObjectsRewrite(apitools_base_cli.NewCmd):
-  """Command wrapping objects.Rewrite."""
-
-  usage = """objects_rewrite <sourceBucket> <sourceObject> <destinationBucket> <destinationObject>"""
-
-  def __init__(self, name, fv):
-    super(ObjectsRewrite, self).__init__(name, fv)
-    flags.DEFINE_enum(
-        'destinationPredefinedAcl',
-        u'authenticatedRead',
-        [u'authenticatedRead', u'bucketOwnerFullControl', u'bucketOwnerRead', u'private', u'projectPrivate', u'publicRead'],
-        u'Apply a predefined set of access controls to the destination '
-        u'object.',
-        flag_values=fv)
-    flags.DEFINE_string(
-        'ifGenerationMatch',
-        None,
-        u"Makes the operation conditional on whether the destination object's"
-        u' current generation matches the given value.',
-        flag_values=fv)
-    flags.DEFINE_string(
-        'ifGenerationNotMatch',
-        None,
-        u"Makes the operation conditional on whether the destination object's"
-        u' current generation does not match the given value.',
-        flag_values=fv)
-    flags.DEFINE_string(
-        'ifMetagenerationMatch',
-        None,
-        u"Makes the operation conditional on whether the destination object's"
-        u' current metageneration matches the given value.',
-        flag_values=fv)
-    flags.DEFINE_string(
-        'ifMetagenerationNotMatch',
-        None,
-        u"Makes the operation conditional on whether the destination object's"
-        u' current metageneration does not match the given value.',
-        flag_values=fv)
-    flags.DEFINE_string(
-        'ifSourceGenerationMatch',
-        None,
-        u"Makes the operation conditional on whether the source object's "
-        u'generation matches the given value.',
-        flag_values=fv)
-    flags.DEFINE_string(
-        'ifSourceGenerationNotMatch',
-        None,
-        u"Makes the operation conditional on whether the source object's "
-        u'generation does not match the given value.',
-        flag_values=fv)
-    flags.DEFINE_string(
-        'ifSourceMetagenerationMatch',
-        None,
-        u"Makes the operation conditional on whether the source object's "
-        u'current metageneration matches the given value.',
-        flag_values=fv)
-    flags.DEFINE_string(
-        'ifSourceMetagenerationNotMatch',
-        None,
-        u"Makes the operation conditional on whether the source object's "
-        u'current metageneration does not match the given value.',
-        flag_values=fv)
-    flags.DEFINE_string(
-        'maxBytesRewrittenPerCall',
-        None,
-        u'The maximum number of bytes that will be rewritten per rewrite '
-        u"request. Most callers shouldn't need to specify this parameter - it"
-        u' is primarily in place to support testing. If specified the value '
-        u'must be an integral multiple of 1 MiB (1048576). Also, this only '
-        u'applies to requests where the source and destination span locations'
-        u' and/or storage classes. Finally, this value must not change across'
-        u" rewrite calls else you'll get an error that the rewriteToken is "
-        u'invalid.',
-        flag_values=fv)
-    flags.DEFINE_string(
-        'object',
-        None,
-        u'A Object resource to be passed as the request body.',
-        flag_values=fv)
-    flags.DEFINE_enum(
-        'projection',
-        u'full',
-        [u'full', u'noAcl'],
-        u'Set of properties to return. Defaults to noAcl, unless the object '
-        u'resource specifies the acl property, when it defaults to full.',
-        flag_values=fv)
-    flags.DEFINE_string(
-        'rewriteToken',
-        None,
-        u'Include this field (from the previous rewrite response) on each '
-        u'rewrite request after the first one, until the rewrite response '
-        u"'done' flag is true. Calls that provide a rewriteToken can omit all"
-        u' other request fields, but if included those fields must match the '
-        u'values provided in the first rewrite request.',
-        flag_values=fv)
-    flags.DEFINE_string(
-        'sourceGeneration',
-        None,
-        u'If present, selects a specific revision of the source object (as '
-        u'opposed to the latest version, the default).',
-        flag_values=fv)
-
-  def RunWithArgs(self, sourceBucket, sourceObject, destinationBucket, destinationObject):
-    """Rewrites a source object to a destination object. Optionally overrides
-    metadata.
-
-    Args:
-      sourceBucket: Name of the bucket in which to find the source object.
-      sourceObject: Name of the source object. For information about how to
-        URL encode object names to be path safe, see Encoding URI Path Parts.
-      destinationBucket: Name of the bucket in which to store the new object.
-        Overrides the provided object metadata's bucket value, if any.
-      destinationObject: Name of the new object. Required when the object
-        metadata is not otherwise provided. Overrides the object metadata's
-        name value, if any. For information about how to URL encode object
-        names to be path safe, see Encoding URI Path Parts.
-
-    Flags:
-      destinationPredefinedAcl: Apply a predefined set of access controls to
-        the destination object.
-      ifGenerationMatch: Makes the operation conditional on whether the
-        destination object's current generation matches the given value.
-      ifGenerationNotMatch: Makes the operation conditional on whether the
-        destination object's current generation does not match the given
-        value.
-      ifMetagenerationMatch: Makes the operation conditional on whether the
-        destination object's current metageneration matches the given value.
-      ifMetagenerationNotMatch: Makes the operation conditional on whether the
-        destination object's current metageneration does not match the given
-        value.
-      ifSourceGenerationMatch: Makes the operation conditional on whether the
-        source object's generation matches the given value.
-      ifSourceGenerationNotMatch: Makes the operation conditional on whether
-        the source object's generation does not match the given value.
-      ifSourceMetagenerationMatch: Makes the operation conditional on whether
-        the source object's current metageneration matches the given value.
-      ifSourceMetagenerationNotMatch: Makes the operation conditional on
-        whether the source object's current metageneration does not match the
-        given value.
-      maxBytesRewrittenPerCall: The maximum number of bytes that will be
-        rewritten per rewrite request. Most callers shouldn't need to specify
-        this parameter - it is primarily in place to support testing. If
-        specified the value must be an integral multiple of 1 MiB (1048576).
-        Also, this only applies to requests where the source and destination
-        span locations and/or storage classes. Finally, this value must not
-        change across rewrite calls else you'll get an error that the
-        rewriteToken is invalid.
-      object: A Object resource to be passed as the request body.
-      projection: Set of properties to return. Defaults to noAcl, unless the
-        object resource specifies the acl property, when it defaults to full.
-      rewriteToken: Include this field (from the previous rewrite response) on
-        each rewrite request after the first one, until the rewrite response
-        'done' flag is true. Calls that provide a rewriteToken can omit all
-        other request fields, but if included those fields must match the
-        values provided in the first rewrite request.
-      sourceGeneration: If present, selects a specific revision of the source
-        object (as opposed to the latest version, the default).
-    """
-    client = GetClientFromFlags()
-    global_params = GetGlobalParamsFromFlags()
-    request = messages.StorageObjectsRewriteRequest(
-        sourceBucket=sourceBucket.decode('utf8'),
-        sourceObject=sourceObject.decode('utf8'),
-        destinationBucket=destinationBucket.decode('utf8'),
-        destinationObject=destinationObject.decode('utf8'),
-        )
-    if FLAGS['destinationPredefinedAcl'].present:
-      request.destinationPredefinedAcl = messages.StorageObjectsRewriteRequest.DestinationPredefinedAclValueValuesEnum(FLAGS.destinationPredefinedAcl)
-    if FLAGS['ifGenerationMatch'].present:
-      request.ifGenerationMatch = int(FLAGS.ifGenerationMatch)
-    if FLAGS['ifGenerationNotMatch'].present:
-      request.ifGenerationNotMatch = int(FLAGS.ifGenerationNotMatch)
-    if FLAGS['ifMetagenerationMatch'].present:
-      request.ifMetagenerationMatch = int(FLAGS.ifMetagenerationMatch)
-    if FLAGS['ifMetagenerationNotMatch'].present:
-      request.ifMetagenerationNotMatch = int(FLAGS.ifMetagenerationNotMatch)
-    if FLAGS['ifSourceGenerationMatch'].present:
-      request.ifSourceGenerationMatch = int(FLAGS.ifSourceGenerationMatch)
-    if FLAGS['ifSourceGenerationNotMatch'].present:
-      request.ifSourceGenerationNotMatch = int(FLAGS.ifSourceGenerationNotMatch)
-    if FLAGS['ifSourceMetagenerationMatch'].present:
-      request.ifSourceMetagenerationMatch = int(FLAGS.ifSourceMetagenerationMatch)
-    if FLAGS['ifSourceMetagenerationNotMatch'].present:
-      request.ifSourceMetagenerationNotMatch = int(FLAGS.ifSourceMetagenerationNotMatch)
-    if FLAGS['maxBytesRewrittenPerCall'].present:
-      request.maxBytesRewrittenPerCall = int(FLAGS.maxBytesRewrittenPerCall)
-    if FLAGS['object'].present:
-      request.object = apitools_base.JsonToMessage(messages.Object, FLAGS.object)
-    if FLAGS['projection'].present:
-      request.projection = messages.StorageObjectsRewriteRequest.ProjectionValueValuesEnum(FLAGS.projection)
-    if FLAGS['rewriteToken'].present:
-      request.rewriteToken = FLAGS.rewriteToken.decode('utf8')
-    if FLAGS['sourceGeneration'].present:
-      request.sourceGeneration = int(FLAGS.sourceGeneration)
-    result = client.objects.Rewrite(
-        request, global_params=global_params)
-    print apitools_base_cli.FormatOutput(result)
-
-
-class ObjectsSetIamPolicy(apitools_base_cli.NewCmd):
-  """Command wrapping objects.SetIamPolicy."""
-
-  usage = """objects_setIamPolicy <bucket> <object>"""
-
-  def __init__(self, name, fv):
-    super(ObjectsSetIamPolicy, self).__init__(name, fv)
-    flags.DEFINE_string(
-        'generation',
-        None,
-        u'If present, selects a specific revision of this object (as opposed '
-        u'to the latest version, the default).',
-        flag_values=fv)
-    flags.DEFINE_string(
-        'policy',
-        None,
-        u'A Policy resource to be passed as the request body.',
-        flag_values=fv)
-
-  def RunWithArgs(self, bucket, object):
-    """Updates an IAM policy for the specified object.
-
-    Args:
-      bucket: Name of the bucket in which the object resides.
-      object: Name of the object. For information about how to URL encode
-        object names to be path safe, see Encoding URI Path Parts.
-
-    Flags:
-      generation: If present, selects a specific revision of this object (as
-        opposed to the latest version, the default).
-      policy: A Policy resource to be passed as the request body.
-    """
-    client = GetClientFromFlags()
-    global_params = GetGlobalParamsFromFlags()
-    request = messages.StorageObjectsSetIamPolicyRequest(
-        bucket=bucket.decode('utf8'),
-        object=object.decode('utf8'),
-        )
-    if FLAGS['generation'].present:
-      request.generation = int(FLAGS.generation)
-    if FLAGS['policy'].present:
-      request.policy = apitools_base.JsonToMessage(messages.Policy, FLAGS.policy)
-    result = client.objects.SetIamPolicy(
-        request, global_params=global_params)
-    print apitools_base_cli.FormatOutput(result)
-
-
-class ObjectsTestIamPermissions(apitools_base_cli.NewCmd):
-  """Command wrapping objects.TestIamPermissions."""
-
-  usage = """objects_testIamPermissions <bucket> <object> <permissions>"""
-
-  def __init__(self, name, fv):
-    super(ObjectsTestIamPermissions, self).__init__(name, fv)
-    flags.DEFINE_string(
-        'generation',
-        None,
-        u'If present, selects a specific revision of this object (as opposed '
-        u'to the latest version, the default).',
-        flag_values=fv)
-
-  def RunWithArgs(self, bucket, object, permissions):
-    """Tests a set of permissions on the given object to see which, if any,
-    are held by the caller.
-
-    Args:
-      bucket: Name of the bucket in which the object resides.
-      object: Name of the object. For information about how to URL encode
-        object names to be path safe, see Encoding URI Path Parts.
-      permissions: Permissions to test.
-
-    Flags:
-      generation: If present, selects a specific revision of this object (as
-        opposed to the latest version, the default).
-    """
-    client = GetClientFromFlags()
-    global_params = GetGlobalParamsFromFlags()
-    request = messages.StorageObjectsTestIamPermissionsRequest(
-        bucket=bucket.decode('utf8'),
-        object=object.decode('utf8'),
-        permissions=permissions.decode('utf8'),
-        )
-    if FLAGS['generation'].present:
-      request.generation = int(FLAGS.generation)
-    result = client.objects.TestIamPermissions(
-        request, global_params=global_params)
-    print apitools_base_cli.FormatOutput(result)
-
-
-class ObjectsUpdate(apitools_base_cli.NewCmd):
-  """Command wrapping objects.Update."""
-
-  usage = """objects_update <bucket> <object>"""
-
-  def __init__(self, name, fv):
-    super(ObjectsUpdate, self).__init__(name, fv)
-    flags.DEFINE_string(
-        'generation',
-        None,
-        u'If present, selects a specific revision of this object (as opposed '
-        u'to the latest version, the default).',
-        flag_values=fv)
-    flags.DEFINE_string(
-        'ifGenerationMatch',
-        None,
-        u"Makes the operation conditional on whether the object's current "
-        u'generation matches the given value.',
-        flag_values=fv)
-    flags.DEFINE_string(
-        'ifGenerationNotMatch',
-        None,
-        u"Makes the operation conditional on whether the object's current "
-        u'generation does not match the given value.',
-        flag_values=fv)
-    flags.DEFINE_string(
-        'ifMetagenerationMatch',
-        None,
-        u"Makes the operation conditional on whether the object's current "
-        u'metageneration matches the given value.',
-        flag_values=fv)
-    flags.DEFINE_string(
-        'ifMetagenerationNotMatch',
-        None,
-        u"Makes the operation conditional on whether the object's current "
-        u'metageneration does not match the given value.',
-        flag_values=fv)
-    flags.DEFINE_string(
-        'objectResource',
-        None,
-        u'A Object resource to be passed as the request body.',
-        flag_values=fv)
-    flags.DEFINE_enum(
-        'predefinedAcl',
-        u'authenticatedRead',
-        [u'authenticatedRead', u'bucketOwnerFullControl', u'bucketOwnerRead', u'private', u'projectPrivate', u'publicRead'],
-        u'Apply a predefined set of access controls to this object.',
-        flag_values=fv)
-    flags.DEFINE_enum(
-        'projection',
-        u'full',
-        [u'full', u'noAcl'],
-        u'Set of properties to return. Defaults to full.',
-        flag_values=fv)
-    flags.DEFINE_string(
-        'download_filename',
-        '',
-        'Filename to use for download.',
-        flag_values=fv)
-    flags.DEFINE_boolean(
-        'overwrite',
-        'False',
-        'If True, overwrite the existing file when downloading.',
-        flag_values=fv)
-
-  def RunWithArgs(self, bucket, object):
-    """Updates an object's metadata.
-
-    Args:
-      bucket: Name of the bucket in which the object resides.
-      object: Name of the object. For information about how to URL encode
-        object names to be path safe, see Encoding URI Path Parts.
-
-    Flags:
-      generation: If present, selects a specific revision of this object (as
-        opposed to the latest version, the default).
-      ifGenerationMatch: Makes the operation conditional on whether the
-        object's current generation matches the given value.
-      ifGenerationNotMatch: Makes the operation conditional on whether the
-        object's current generation does not match the given value.
-      ifMetagenerationMatch: Makes the operation conditional on whether the
-        object's current metageneration matches the given value.
-      ifMetagenerationNotMatch: Makes the operation conditional on whether the
-        object's current metageneration does not match the given value.
-      objectResource: A Object resource to be passed as the request body.
-      predefinedAcl: Apply a predefined set of access controls to this object.
-      projection: Set of properties to return. Defaults to full.
-      download_filename: Filename to use for download.
-      overwrite: If True, overwrite the existing file when downloading.
-    """
-    client = GetClientFromFlags()
-    global_params = GetGlobalParamsFromFlags()
-    request = messages.StorageObjectsUpdateRequest(
-        bucket=bucket.decode('utf8'),
-        object=object.decode('utf8'),
-        )
-    if FLAGS['generation'].present:
-      request.generation = int(FLAGS.generation)
-    if FLAGS['ifGenerationMatch'].present:
-      request.ifGenerationMatch = int(FLAGS.ifGenerationMatch)
-    if FLAGS['ifGenerationNotMatch'].present:
-      request.ifGenerationNotMatch = int(FLAGS.ifGenerationNotMatch)
-    if FLAGS['ifMetagenerationMatch'].present:
-      request.ifMetagenerationMatch = int(FLAGS.ifMetagenerationMatch)
-    if FLAGS['ifMetagenerationNotMatch'].present:
-      request.ifMetagenerationNotMatch = int(FLAGS.ifMetagenerationNotMatch)
-    if FLAGS['objectResource'].present:
-      request.objectResource = apitools_base.JsonToMessage(messages.Object, FLAGS.objectResource)
-    if FLAGS['predefinedAcl'].present:
-      request.predefinedAcl = messages.StorageObjectsUpdateRequest.PredefinedAclValueValuesEnum(FLAGS.predefinedAcl)
-    if FLAGS['projection'].present:
-      request.projection = messages.StorageObjectsUpdateRequest.ProjectionValueValuesEnum(FLAGS.projection)
-    download = None
-    if FLAGS.download_filename:
-      download = apitools_base.Download.FromFile(FLAGS.download_filename, overwrite=FLAGS.overwrite,
-          progress_callback=apitools_base.DownloadProgressPrinter,
-          finish_callback=apitools_base.DownloadCompletePrinter)
-    result = client.objects.Update(
-        request, global_params=global_params, download=download)
-    print apitools_base_cli.FormatOutput(result)
-
-
-class ObjectsWatchAll(apitools_base_cli.NewCmd):
-  """Command wrapping objects.WatchAll."""
-
-  usage = """objects_watchAll <bucket>"""
-
-  def __init__(self, name, fv):
-    super(ObjectsWatchAll, self).__init__(name, fv)
-    flags.DEFINE_string(
-        'channel',
-        None,
-        u'A Channel resource to be passed as the request body.',
-        flag_values=fv)
-    flags.DEFINE_string(
-        'delimiter',
-        None,
-        u'Returns results in a directory-like mode. items will contain only '
-        u'objects whose names, aside from the prefix, do not contain '
-        u'delimiter. Objects whose names, aside from the prefix, contain '
-        u'delimiter will have their name, truncated after the delimiter, '
-        u'returned in prefixes. Duplicate prefixes are omitted.',
-        flag_values=fv)
-    flags.DEFINE_integer(
-        'maxResults',
-        None,
-        u'Maximum number of items plus prefixes to return. As duplicate '
-        u'prefixes are omitted, fewer total results may be returned than '
-        u'requested. The default value of this parameter is 1,000 items.',
-        flag_values=fv)
-    flags.DEFINE_string(
-        'pageToken',
-        None,
-        u'A previously-returned page token representing part of the larger '
-        u'set of results to view.',
-        flag_values=fv)
-    flags.DEFINE_string(
-        'prefix',
-        None,
-        u'Filter results to objects whose names begin with this prefix.',
-        flag_values=fv)
-    flags.DEFINE_enum(
-        'projection',
-        u'full',
-        [u'full', u'noAcl'],
-        u'Set of properties to return. Defaults to noAcl.',
-        flag_values=fv)
-    flags.DEFINE_boolean(
-        'versions',
-        None,
-        u'If true, lists all versions of an object as distinct results. The '
-        u'default is false. For more information, see Object Versioning.',
-        flag_values=fv)
-
-  def RunWithArgs(self, bucket):
-    """Watch for changes on all objects in a bucket.
-
-    Args:
-      bucket: Name of the bucket in which to look for objects.
-
-    Flags:
-      channel: A Channel resource to be passed as the request body.
-      delimiter: Returns results in a directory-like mode. items will contain
-        only objects whose names, aside from the prefix, do not contain
-        delimiter. Objects whose names, aside from the prefix, contain
-        delimiter will have their name, truncated after the delimiter,
-        returned in prefixes. Duplicate prefixes are omitted.
-      maxResults: Maximum number of items plus prefixes to return. As
-        duplicate prefixes are omitted, fewer total results may be returned
-        than requested. The default value of this parameter is 1,000 items.
-      pageToken: A previously-returned page token representing part of the
-        larger set of results to view.
-      prefix: Filter results to objects whose names begin with this prefix.
-      projection: Set of properties to return. Defaults to noAcl.
-      versions: If true, lists all versions of an object as distinct results.
-        The default is false. For more information, see Object Versioning.
-    """
-    client = GetClientFromFlags()
-    global_params = GetGlobalParamsFromFlags()
-    request = messages.StorageObjectsWatchAllRequest(
-        bucket=bucket.decode('utf8'),
-        )
-    if FLAGS['channel'].present:
-      request.channel = apitools_base.JsonToMessage(messages.Channel, FLAGS.channel)
-    if FLAGS['delimiter'].present:
-      request.delimiter = FLAGS.delimiter.decode('utf8')
-    if FLAGS['maxResults'].present:
-      request.maxResults = FLAGS.maxResults
-    if FLAGS['pageToken'].present:
-      request.pageToken = FLAGS.pageToken.decode('utf8')
-    if FLAGS['prefix'].present:
-      request.prefix = FLAGS.prefix.decode('utf8')
-    if FLAGS['projection'].present:
-      request.projection = messages.StorageObjectsWatchAllRequest.ProjectionValueValuesEnum(FLAGS.projection)
-    if FLAGS['versions'].present:
-      request.versions = FLAGS.versions
-    result = client.objects.WatchAll(
-        request, global_params=global_params)
-    print apitools_base_cli.FormatOutput(result)
-
-
-def main(_):
-  appcommands.AddCmd('pyshell', PyShell)
-  appcommands.AddCmd('bucketAccessControls_delete', BucketAccessControlsDelete)
-  appcommands.AddCmd('bucketAccessControls_get', BucketAccessControlsGet)
-  appcommands.AddCmd('bucketAccessControls_insert', BucketAccessControlsInsert)
-  appcommands.AddCmd('bucketAccessControls_list', BucketAccessControlsList)
-  appcommands.AddCmd('bucketAccessControls_patch', BucketAccessControlsPatch)
-  appcommands.AddCmd('bucketAccessControls_update', BucketAccessControlsUpdate)
-  appcommands.AddCmd('buckets_delete', BucketsDelete)
-  appcommands.AddCmd('buckets_get', BucketsGet)
-  appcommands.AddCmd('buckets_getIamPolicy', BucketsGetIamPolicy)
-  appcommands.AddCmd('buckets_insert', BucketsInsert)
-  appcommands.AddCmd('buckets_list', BucketsList)
-  appcommands.AddCmd('buckets_patch', BucketsPatch)
-  appcommands.AddCmd('buckets_setIamPolicy', BucketsSetIamPolicy)
-  appcommands.AddCmd('buckets_testIamPermissions', BucketsTestIamPermissions)
-  appcommands.AddCmd('buckets_update', BucketsUpdate)
-  appcommands.AddCmd('channels_stop', ChannelsStop)
-  appcommands.AddCmd('defaultObjectAccessControls_delete', DefaultObjectAccessControlsDelete)
-  appcommands.AddCmd('defaultObjectAccessControls_get', DefaultObjectAccessControlsGet)
-  appcommands.AddCmd('defaultObjectAccessControls_insert', DefaultObjectAccessControlsInsert)
-  appcommands.AddCmd('defaultObjectAccessControls_list', DefaultObjectAccessControlsList)
-  appcommands.AddCmd('defaultObjectAccessControls_patch', DefaultObjectAccessControlsPatch)
-  appcommands.AddCmd('defaultObjectAccessControls_update', DefaultObjectAccessControlsUpdate)
-  appcommands.AddCmd('notifications_delete', NotificationsDelete)
-  appcommands.AddCmd('notifications_get', NotificationsGet)
-  appcommands.AddCmd('notifications_insert', NotificationsInsert)
-  appcommands.AddCmd('notifications_list', NotificationsList)
-  appcommands.AddCmd('objectAccessControls_delete', ObjectAccessControlsDelete)
-  appcommands.AddCmd('objectAccessControls_get', ObjectAccessControlsGet)
-  appcommands.AddCmd('objectAccessControls_insert', ObjectAccessControlsInsert)
-  appcommands.AddCmd('objectAccessControls_list', ObjectAccessControlsList)
-  appcommands.AddCmd('objectAccessControls_patch', ObjectAccessControlsPatch)
-  appcommands.AddCmd('objectAccessControls_update', ObjectAccessControlsUpdate)
-  appcommands.AddCmd('objects_compose', ObjectsCompose)
-  appcommands.AddCmd('objects_copy', ObjectsCopy)
-  appcommands.AddCmd('objects_delete', ObjectsDelete)
-  appcommands.AddCmd('objects_get', ObjectsGet)
-  appcommands.AddCmd('objects_getIamPolicy', ObjectsGetIamPolicy)
-  appcommands.AddCmd('objects_insert', ObjectsInsert)
-  appcommands.AddCmd('objects_list', ObjectsList)
-  appcommands.AddCmd('objects_patch', ObjectsPatch)
-  appcommands.AddCmd('objects_rewrite', ObjectsRewrite)
-  appcommands.AddCmd('objects_setIamPolicy', ObjectsSetIamPolicy)
-  appcommands.AddCmd('objects_testIamPermissions', ObjectsTestIamPermissions)
-  appcommands.AddCmd('objects_update', ObjectsUpdate)
-  appcommands.AddCmd('objects_watchAll', ObjectsWatchAll)
-
-  apitools_base_cli.SetupLogger()
-  if hasattr(appcommands, 'SetDefaultCommand'):
-    appcommands.SetDefaultCommand('pyshell')
-
-
-run_main = apitools_base_cli.run_main
-
-if __name__ == '__main__':
-  appcommands.Run()
diff --git a/samples/storage_sample/storage_v1/storage_v1_client.py b/samples/storage_sample/storage_v1/storage_v1_client.py
index 74dfdc4..38ceab9 100644
--- a/samples/storage_sample/storage_v1/storage_v1_client.py
+++ b/samples/storage_sample/storage_v1/storage_v1_client.py
@@ -24,7 +24,7 @@
                get_credentials=True, http=None, model=None,
                log_request=False, log_response=False,
                credentials_args=None, default_global_params=None,
-               additional_http_headers=None):
+               additional_http_headers=None, response_encoding=None):
     """Create a new storage handle."""
     url = url or self.BASE_URL
     super(StorageV1, self).__init__(
@@ -33,7 +33,8 @@
         log_request=log_request, log_response=log_response,
         credentials_args=credentials_args,
         default_global_params=default_global_params,
-        additional_http_headers=additional_http_headers)
+        additional_http_headers=additional_http_headers,
+        response_encoding=response_encoding)
     self.bucketAccessControls = self.BucketAccessControlsService(self)
     self.buckets = self.BucketsService(self)
     self.channels = self.ChannelsService(self)
@@ -53,7 +54,7 @@
           }
 
     def Delete(self, request, global_params=None):
-      """Permanently deletes the ACL entry for the specified entity on the specified bucket.
+      r"""Permanently deletes the ACL entry for the specified entity on the specified bucket.
 
       Args:
         request: (StorageBucketAccessControlsDeleteRequest) input message
@@ -79,7 +80,7 @@
     )
 
     def Get(self, request, global_params=None):
-      """Returns the ACL entry for the specified entity on the specified bucket.
+      r"""Returns the ACL entry for the specified entity on the specified bucket.
 
       Args:
         request: (StorageBucketAccessControlsGetRequest) input message
@@ -105,7 +106,7 @@
     )
 
     def Insert(self, request, global_params=None):
-      """Creates a new ACL entry on the specified bucket.
+      r"""Creates a new ACL entry on the specified bucket.
 
       Args:
         request: (BucketAccessControl) input message
@@ -131,7 +132,7 @@
     )
 
     def List(self, request, global_params=None):
-      """Retrieves ACL entries on the specified bucket.
+      r"""Retrieves ACL entries on the specified bucket.
 
       Args:
         request: (StorageBucketAccessControlsListRequest) input message
@@ -157,7 +158,7 @@
     )
 
     def Patch(self, request, global_params=None):
-      """Updates an ACL entry on the specified bucket. This method supports patch semantics.
+      r"""Updates an ACL entry on the specified bucket. This method supports patch semantics.
 
       Args:
         request: (BucketAccessControl) input message
@@ -183,7 +184,7 @@
     )
 
     def Update(self, request, global_params=None):
-      """Updates an ACL entry on the specified bucket.
+      r"""Updates an ACL entry on the specified bucket.
 
       Args:
         request: (BucketAccessControl) input message
@@ -219,7 +220,7 @@
           }
 
     def Delete(self, request, global_params=None):
-      """Permanently deletes an empty bucket.
+      r"""Permanently deletes an empty bucket.
 
       Args:
         request: (StorageBucketsDeleteRequest) input message
@@ -245,7 +246,7 @@
     )
 
     def Get(self, request, global_params=None):
-      """Returns metadata for the specified bucket.
+      r"""Returns metadata for the specified bucket.
 
       Args:
         request: (StorageBucketsGetRequest) input message
@@ -271,7 +272,7 @@
     )
 
     def GetIamPolicy(self, request, global_params=None):
-      """Returns an IAM policy for the specified bucket.
+      r"""Returns an IAM policy for the specified bucket.
 
       Args:
         request: (StorageBucketsGetIamPolicyRequest) input message
@@ -297,7 +298,7 @@
     )
 
     def Insert(self, request, global_params=None):
-      """Creates a new bucket.
+      r"""Creates a new bucket.
 
       Args:
         request: (StorageBucketsInsertRequest) input message
@@ -323,7 +324,7 @@
     )
 
     def List(self, request, global_params=None):
-      """Retrieves a list of buckets for a given project.
+      r"""Retrieves a list of buckets for a given project.
 
       Args:
         request: (StorageBucketsListRequest) input message
@@ -349,7 +350,7 @@
     )
 
     def Patch(self, request, global_params=None):
-      """Updates a bucket. This method supports patch semantics.
+      r"""Updates a bucket. This method supports patch semantics.
 
       Args:
         request: (StorageBucketsPatchRequest) input message
@@ -375,7 +376,7 @@
     )
 
     def SetIamPolicy(self, request, global_params=None):
-      """Updates an IAM policy for the specified bucket.
+      r"""Updates an IAM policy for the specified bucket.
 
       Args:
         request: (StorageBucketsSetIamPolicyRequest) input message
@@ -401,7 +402,7 @@
     )
 
     def TestIamPermissions(self, request, global_params=None):
-      """Tests a set of permissions on the given bucket to see which, if any, are held by the caller.
+      r"""Tests a set of permissions on the given bucket to see which, if any, are held by the caller.
 
       Args:
         request: (StorageBucketsTestIamPermissionsRequest) input message
@@ -427,7 +428,7 @@
     )
 
     def Update(self, request, global_params=None):
-      """Updates a bucket.
+      r"""Updates a bucket.
 
       Args:
         request: (StorageBucketsUpdateRequest) input message
@@ -463,7 +464,7 @@
           }
 
     def Stop(self, request, global_params=None):
-      """Stop watching resources through this channel.
+      r"""Stop watching resources through this channel.
 
       Args:
         request: (Channel) input message
@@ -499,7 +500,7 @@
           }
 
     def Delete(self, request, global_params=None):
-      """Permanently deletes the default object ACL entry for the specified entity on the specified bucket.
+      r"""Permanently deletes the default object ACL entry for the specified entity on the specified bucket.
 
       Args:
         request: (StorageDefaultObjectAccessControlsDeleteRequest) input message
@@ -525,7 +526,7 @@
     )
 
     def Get(self, request, global_params=None):
-      """Returns the default object ACL entry for the specified entity on the specified bucket.
+      r"""Returns the default object ACL entry for the specified entity on the specified bucket.
 
       Args:
         request: (StorageDefaultObjectAccessControlsGetRequest) input message
@@ -551,7 +552,7 @@
     )
 
     def Insert(self, request, global_params=None):
-      """Creates a new default object ACL entry on the specified bucket.
+      r"""Creates a new default object ACL entry on the specified bucket.
 
       Args:
         request: (ObjectAccessControl) input message
@@ -577,7 +578,7 @@
     )
 
     def List(self, request, global_params=None):
-      """Retrieves default object ACL entries on the specified bucket.
+      r"""Retrieves default object ACL entries on the specified bucket.
 
       Args:
         request: (StorageDefaultObjectAccessControlsListRequest) input message
@@ -603,7 +604,7 @@
     )
 
     def Patch(self, request, global_params=None):
-      """Updates a default object ACL entry on the specified bucket. This method supports patch semantics.
+      r"""Updates a default object ACL entry on the specified bucket. This method supports patch semantics.
 
       Args:
         request: (ObjectAccessControl) input message
@@ -629,7 +630,7 @@
     )
 
     def Update(self, request, global_params=None):
-      """Updates a default object ACL entry on the specified bucket.
+      r"""Updates a default object ACL entry on the specified bucket.
 
       Args:
         request: (ObjectAccessControl) input message
@@ -665,7 +666,7 @@
           }
 
     def Delete(self, request, global_params=None):
-      """Permanently deletes a notification subscription.
+      r"""Permanently deletes a notification subscription.
 
       Args:
         request: (StorageNotificationsDeleteRequest) input message
@@ -691,7 +692,7 @@
     )
 
     def Get(self, request, global_params=None):
-      """View a notification configuration.
+      r"""View a notification configuration.
 
       Args:
         request: (StorageNotificationsGetRequest) input message
@@ -717,7 +718,7 @@
     )
 
     def Insert(self, request, global_params=None):
-      """Creates a notification subscription for a given bucket.
+      r"""Creates a notification subscription for a given bucket.
 
       Args:
         request: (Notification) input message
@@ -743,7 +744,7 @@
     )
 
     def List(self, request, global_params=None):
-      """Retrieves a list of notification subscriptions for a given bucket.
+      r"""Retrieves a list of notification subscriptions for a given bucket.
 
       Args:
         request: (StorageNotificationsListRequest) input message
@@ -779,7 +780,7 @@
           }
 
     def Delete(self, request, global_params=None):
-      """Permanently deletes the ACL entry for the specified entity on the specified object.
+      r"""Permanently deletes the ACL entry for the specified entity on the specified object.
 
       Args:
         request: (StorageObjectAccessControlsDeleteRequest) input message
@@ -805,7 +806,7 @@
     )
 
     def Get(self, request, global_params=None):
-      """Returns the ACL entry for the specified entity on the specified object.
+      r"""Returns the ACL entry for the specified entity on the specified object.
 
       Args:
         request: (StorageObjectAccessControlsGetRequest) input message
@@ -831,7 +832,7 @@
     )
 
     def Insert(self, request, global_params=None):
-      """Creates a new ACL entry on the specified object.
+      r"""Creates a new ACL entry on the specified object.
 
       Args:
         request: (StorageObjectAccessControlsInsertRequest) input message
@@ -857,7 +858,7 @@
     )
 
     def List(self, request, global_params=None):
-      """Retrieves ACL entries on the specified object.
+      r"""Retrieves ACL entries on the specified object.
 
       Args:
         request: (StorageObjectAccessControlsListRequest) input message
@@ -883,7 +884,7 @@
     )
 
     def Patch(self, request, global_params=None):
-      """Updates an ACL entry on the specified object. This method supports patch semantics.
+      r"""Updates an ACL entry on the specified object. This method supports patch semantics.
 
       Args:
         request: (StorageObjectAccessControlsPatchRequest) input message
@@ -909,7 +910,7 @@
     )
 
     def Update(self, request, global_params=None):
-      """Updates an ACL entry on the specified object.
+      r"""Updates an ACL entry on the specified object.
 
       Args:
         request: (StorageObjectAccessControlsUpdateRequest) input message
@@ -953,7 +954,7 @@
           }
 
     def Compose(self, request, global_params=None, download=None):
-      """Concatenates a list of existing objects into a new object in the same bucket.
+      r"""Concatenates a list of existing objects into a new object in the same bucket.
 
       Args:
         request: (StorageObjectsComposeRequest) input message
@@ -982,7 +983,7 @@
     )
 
     def Copy(self, request, global_params=None, download=None):
-      """Copies a source object to a destination object. Optionally overrides metadata.
+      r"""Copies a source object to a destination object. Optionally overrides metadata.
 
       Args:
         request: (StorageObjectsCopyRequest) input message
@@ -1011,7 +1012,7 @@
     )
 
     def Delete(self, request, global_params=None):
-      """Deletes an object and its metadata. Deletions are permanent if versioning is not enabled for the bucket, or if the generation parameter is used.
+      r"""Deletes an object and its metadata. Deletions are permanent if versioning is not enabled for the bucket, or if the generation parameter is used.
 
       Args:
         request: (StorageObjectsDeleteRequest) input message
@@ -1037,7 +1038,7 @@
     )
 
     def Get(self, request, global_params=None, download=None):
-      """Retrieves an object or its metadata.
+      r"""Retrieves an object or its metadata.
 
       Args:
         request: (StorageObjectsGetRequest) input message
@@ -1066,7 +1067,7 @@
     )
 
     def GetIamPolicy(self, request, global_params=None):
-      """Returns an IAM policy for the specified object.
+      r"""Returns an IAM policy for the specified object.
 
       Args:
         request: (StorageObjectsGetIamPolicyRequest) input message
@@ -1092,7 +1093,7 @@
     )
 
     def Insert(self, request, global_params=None, upload=None, download=None):
-      """Stores a new object and metadata.
+      r"""Stores a new object and metadata.
 
       Args:
         request: (StorageObjectsInsertRequest) input message
@@ -1125,7 +1126,7 @@
     )
 
     def List(self, request, global_params=None):
-      """Retrieves a list of objects matching the criteria.
+      r"""Retrieves a list of objects matching the criteria.
 
       Args:
         request: (StorageObjectsListRequest) input message
@@ -1151,7 +1152,7 @@
     )
 
     def Patch(self, request, global_params=None):
-      """Updates an object's metadata. This method supports patch semantics.
+      r"""Updates an object's metadata. This method supports patch semantics.
 
       Args:
         request: (StorageObjectsPatchRequest) input message
@@ -1177,7 +1178,7 @@
     )
 
     def Rewrite(self, request, global_params=None):
-      """Rewrites a source object to a destination object. Optionally overrides metadata.
+      r"""Rewrites a source object to a destination object. Optionally overrides metadata.
 
       Args:
         request: (StorageObjectsRewriteRequest) input message
@@ -1203,7 +1204,7 @@
     )
 
     def SetIamPolicy(self, request, global_params=None):
-      """Updates an IAM policy for the specified object.
+      r"""Updates an IAM policy for the specified object.
 
       Args:
         request: (StorageObjectsSetIamPolicyRequest) input message
@@ -1229,7 +1230,7 @@
     )
 
     def TestIamPermissions(self, request, global_params=None):
-      """Tests a set of permissions on the given object to see which, if any, are held by the caller.
+      r"""Tests a set of permissions on the given object to see which, if any, are held by the caller.
 
       Args:
         request: (StorageObjectsTestIamPermissionsRequest) input message
@@ -1255,7 +1256,7 @@
     )
 
     def Update(self, request, global_params=None, download=None):
-      """Updates an object's metadata.
+      r"""Updates an object's metadata.
 
       Args:
         request: (StorageObjectsUpdateRequest) input message
@@ -1284,7 +1285,7 @@
     )
 
     def WatchAll(self, request, global_params=None):
-      """Watch for changes on all objects in a bucket.
+      r"""Watch for changes on all objects in a bucket.
 
       Args:
         request: (StorageObjectsWatchAllRequest) input message
diff --git a/samples/storage_sample/storage_v1/storage_v1_messages.py b/samples/storage_sample/storage_v1/storage_v1_messages.py
index f392334..703d79b 100644
--- a/samples/storage_sample/storage_v1/storage_v1_messages.py
+++ b/samples/storage_sample/storage_v1/storage_v1_messages.py
@@ -14,7 +14,7 @@
 
 
 class Bucket(_messages.Message):
-  """A bucket.
+  r"""A bucket.
 
   Messages:
     CorsValueListEntry: A CorsValueListEntry object.
@@ -65,7 +65,7 @@
   """
 
   class CorsValueListEntry(_messages.Message):
-    """A CorsValueListEntry object.
+    r"""A CorsValueListEntry object.
 
     Fields:
       maxAgeSeconds: The value, in seconds, to return in the  Access-Control-
@@ -85,8 +85,8 @@
     responseHeader = _messages.StringField(4, repeated=True)
 
   class LifecycleValue(_messages.Message):
-    """The bucket's lifecycle configuration. See lifecycle management for more
-    information.
+    r"""The bucket's lifecycle configuration. See lifecycle management for
+    more information.
 
     Messages:
       RuleValueListEntry: A RuleValueListEntry object.
@@ -97,7 +97,7 @@
     """
 
     class RuleValueListEntry(_messages.Message):
-      """A RuleValueListEntry object.
+      r"""A RuleValueListEntry object.
 
       Messages:
         ActionValue: The action to take.
@@ -109,7 +109,7 @@
       """
 
       class ActionValue(_messages.Message):
-        """The action to take.
+        r"""The action to take.
 
         Fields:
           type: Type of the action. Currently, only Delete is supported.
@@ -118,7 +118,7 @@
         type = _messages.StringField(1)
 
       class ConditionValue(_messages.Message):
-        """The condition(s) under which the action will be taken.
+        r"""The condition(s) under which the action will be taken.
 
         Fields:
           age: Age of an object (in days). This condition is satisfied when an
@@ -146,7 +146,7 @@
     rule = _messages.MessageField('RuleValueListEntry', 1, repeated=True)
 
   class LoggingValue(_messages.Message):
-    """The bucket's logging configuration, which defines the destination
+    r"""The bucket's logging configuration, which defines the destination
     bucket and optional name prefix for the current bucket's logs.
 
     Fields:
@@ -159,7 +159,8 @@
     logObjectPrefix = _messages.StringField(2)
 
   class OwnerValue(_messages.Message):
-    """The owner of the bucket. This is always the project team's owner group.
+    r"""The owner of the bucket. This is always the project team's owner
+    group.
 
     Fields:
       entity: The entity, in the form project-owner-projectId.
@@ -170,7 +171,7 @@
     entityId = _messages.StringField(2)
 
   class VersioningValue(_messages.Message):
-    """The bucket's versioning configuration.
+    r"""The bucket's versioning configuration.
 
     Fields:
       enabled: While set to true, versioning is fully enabled for this bucket.
@@ -179,9 +180,9 @@
     enabled = _messages.BooleanField(1)
 
   class WebsiteValue(_messages.Message):
-    """The bucket's website configuration, controlling how the service behaves
-    when accessing bucket contents as a web site. See the Static Website
-    Examples for more information.
+    r"""The bucket's website configuration, controlling how the service
+    behaves when accessing bucket contents as a web site. See the Static
+    Website Examples for more information.
 
     Fields:
       mainPageSuffix: If the requested object path is missing, the service
@@ -219,7 +220,7 @@
 
 
 class BucketAccessControl(_messages.Message):
-  """An access-control entry.
+  r"""An access-control entry.
 
   Messages:
     ProjectTeamValue: The project team associated with the entity, if any.
@@ -247,7 +248,7 @@
   """
 
   class ProjectTeamValue(_messages.Message):
-    """The project team associated with the entity, if any.
+    r"""The project team associated with the entity, if any.
 
     Fields:
       projectNumber: The project number.
@@ -271,7 +272,7 @@
 
 
 class BucketAccessControls(_messages.Message):
-  """An access-control list.
+  r"""An access-control list.
 
   Fields:
     items: The list of items.
@@ -284,7 +285,7 @@
 
 
 class Buckets(_messages.Message):
-  """A list of buckets.
+  r"""A list of buckets.
 
   Fields:
     items: The list of items.
@@ -301,7 +302,7 @@
 
 
 class Channel(_messages.Message):
-  """An notification channel used to watch for resource changes.
+  r"""An notification channel used to watch for resource changes.
 
   Messages:
     ParamsValue: Additional parameters controlling delivery channel behavior.
@@ -327,7 +328,7 @@
 
   @encoding.MapUnrecognizedFields('additionalProperties')
   class ParamsValue(_messages.Message):
-    """Additional parameters controlling delivery channel behavior. Optional.
+    r"""Additional parameters controlling delivery channel behavior. Optional.
 
     Messages:
       AdditionalProperty: An additional property for a ParamsValue object.
@@ -337,7 +338,7 @@
     """
 
     class AdditionalProperty(_messages.Message):
-      """An additional property for a ParamsValue object.
+      r"""An additional property for a ParamsValue object.
 
       Fields:
         key: Name of the additional property.
@@ -362,7 +363,7 @@
 
 
 class ComposeRequest(_messages.Message):
-  """A Compose request.
+  r"""A Compose request.
 
   Messages:
     SourceObjectsValueListEntry: A SourceObjectsValueListEntry object.
@@ -375,7 +376,7 @@
   """
 
   class SourceObjectsValueListEntry(_messages.Message):
-    """A SourceObjectsValueListEntry object.
+    r"""A SourceObjectsValueListEntry object.
 
     Messages:
       ObjectPreconditionsValue: Conditions that must be met for this operation
@@ -390,7 +391,7 @@
     """
 
     class ObjectPreconditionsValue(_messages.Message):
-      """Conditions that must be met for this operation to execute.
+      r"""Conditions that must be met for this operation to execute.
 
       Fields:
         ifGenerationMatch: Only perform the composition if the generation of
@@ -411,7 +412,7 @@
 
 
 class Notification(_messages.Message):
-  """A subscription to receive Google PubSub notifications.
+  r"""A subscription to receive Google PubSub notifications.
 
   Messages:
     CustomAttributesValue: An optional list of additional attributes to attach
@@ -442,7 +443,7 @@
 
   @encoding.MapUnrecognizedFields('additionalProperties')
   class CustomAttributesValue(_messages.Message):
-    """An optional list of additional attributes to attach to each Cloud
+    r"""An optional list of additional attributes to attach to each Cloud
     PubSub message published for this notification subscription.
 
     Messages:
@@ -455,7 +456,7 @@
     """
 
     class AdditionalProperty(_messages.Message):
-      """An additional property for a CustomAttributesValue object.
+      r"""An additional property for a CustomAttributesValue object.
 
       Fields:
         key: Name of the additional property.
@@ -481,7 +482,7 @@
 
 
 class Notifications(_messages.Message):
-  """A list of notification subscriptions.
+  r"""A list of notification subscriptions.
 
   Fields:
     items: The list of items.
@@ -494,7 +495,7 @@
 
 
 class Object(_messages.Message):
-  """An object.
+  r"""An object.
 
   Messages:
     CustomerEncryptionValue: Metadata of customer-supplied encryption key, if
@@ -546,7 +547,7 @@
   """
 
   class CustomerEncryptionValue(_messages.Message):
-    """Metadata of customer-supplied encryption key, if the object is
+    r"""Metadata of customer-supplied encryption key, if the object is
     encrypted by such a key.
 
     Fields:
@@ -559,7 +560,7 @@
 
   @encoding.MapUnrecognizedFields('additionalProperties')
   class MetadataValue(_messages.Message):
-    """User-provided metadata, in key/value pairs.
+    r"""User-provided metadata, in key/value pairs.
 
     Messages:
       AdditionalProperty: An additional property for a MetadataValue object.
@@ -569,7 +570,7 @@
     """
 
     class AdditionalProperty(_messages.Message):
-      """An additional property for a MetadataValue object.
+      r"""An additional property for a MetadataValue object.
 
       Fields:
         key: Name of the additional property.
@@ -582,7 +583,7 @@
     additionalProperties = _messages.MessageField('AdditionalProperty', 1, repeated=True)
 
   class OwnerValue(_messages.Message):
-    """The owner of the object. This will always be the uploader of the
+    r"""The owner of the object. This will always be the uploader of the
     object.
 
     Fields:
@@ -622,7 +623,7 @@
 
 
 class ObjectAccessControl(_messages.Message):
-  """An access-control entry.
+  r"""An access-control entry.
 
   Messages:
     ProjectTeamValue: The project team associated with the entity, if any.
@@ -651,7 +652,7 @@
   """
 
   class ProjectTeamValue(_messages.Message):
-    """The project team associated with the entity, if any.
+    r"""The project team associated with the entity, if any.
 
     Fields:
       projectNumber: The project number.
@@ -677,7 +678,7 @@
 
 
 class ObjectAccessControls(_messages.Message):
-  """An access-control list.
+  r"""An access-control list.
 
   Fields:
     items: The list of items.
@@ -690,7 +691,7 @@
 
 
 class Objects(_messages.Message):
-  """A list of objects.
+  r"""A list of objects.
 
   Fields:
     items: The list of items.
@@ -710,7 +711,7 @@
 
 
 class Policy(_messages.Message):
-  """A bucket/object IAM policy.
+  r"""A bucket/object IAM policy.
 
   Messages:
     BindingsValueListEntry: A BindingsValueListEntry object.
@@ -730,48 +731,48 @@
   """
 
   class BindingsValueListEntry(_messages.Message):
-    """A BindingsValueListEntry object.
+    r"""A BindingsValueListEntry object.
 
     Fields:
       members: A collection of identifiers for members who may assume the
-        provided role. Recognized identifiers are as follows:   - allUsers \u2014 A
+        provided role. Recognized identifiers are as follows:   - allUsers - A
         special identifier that represents anyone on the internet; with or
-        without a Google account.   - allAuthenticatedUsers \u2014 A special
+        without a Google account.   - allAuthenticatedUsers - A special
         identifier that represents anyone who is authenticated with a Google
-        account or a service account.   - user:emailid \u2014 An email address that
+        account or a service account.   - user:emailid - An email address that
         represents a specific account. For example, user:alice@gmail.com or
-        user:joe@example.com.   - serviceAccount:emailid \u2014 An email address
+        user:joe@example.com.   - serviceAccount:emailid - An email address
         that represents a service account. For example,  serviceAccount:my-
-        other-app@appspot.gserviceaccount.com .   - group:emailid \u2014 An email
+        other-app@appspot.gserviceaccount.com .   - group:emailid - An email
         address that represents a Google group. For example,
-        group:admins@example.com.   - domain:domain \u2014 A Google Apps domain
+        group:admins@example.com.   - domain:domain - A Google Apps domain
         name that represents all the users of that domain. For example,
-        domain:google.com or domain:example.com.   - projectOwner:projectid \u2014
+        domain:google.com or domain:example.com.   - projectOwner:projectid -
         Owners of the given project. For example, projectOwner:my-example-
-        project   - projectEditor:projectid \u2014 Editors of the given project.
+        project   - projectEditor:projectid - Editors of the given project.
         For example, projectEditor:my-example-project   -
-        projectViewer:projectid \u2014 Viewers of the given project. For example,
+        projectViewer:projectid - Viewers of the given project. For example,
         projectViewer:my-example-project
       role: The role to which members belong. Two types of roles are
         supported: new IAM roles, which grant permissions that do not map
         directly to those provided by ACLs, and legacy IAM roles, which do map
         directly to ACL permissions. All roles are of the format
         roles/storage.specificRole. The new IAM roles are:   -
-        roles/storage.admin \u2014 Full control of Google Cloud Storage resources.
-        - roles/storage.objectViewer \u2014 Read-Only access to Google Cloud
-        Storage objects.   - roles/storage.objectCreator \u2014 Access to create
-        objects in Google Cloud Storage.   - roles/storage.objectAdmin \u2014 Full
+        roles/storage.admin - Full control of Google Cloud Storage resources.
+        - roles/storage.objectViewer - Read-Only access to Google Cloud
+        Storage objects.   - roles/storage.objectCreator - Access to create
+        objects in Google Cloud Storage.   - roles/storage.objectAdmin - Full
         control of Google Cloud Storage objects.   The legacy IAM roles are:
-        - roles/storage.legacyObjectReader \u2014 Read-only access to objects
+        - roles/storage.legacyObjectReader - Read-only access to objects
         without listing. Equivalent to an ACL entry on an object with the
-        READER role.   - roles/storage.legacyObjectOwner \u2014 Read/write access
+        READER role.   - roles/storage.legacyObjectOwner - Read/write access
         to existing objects without listing. Equivalent to an ACL entry on an
-        object with the OWNER role.   - roles/storage.legacyBucketReader \u2014
+        object with the OWNER role.   - roles/storage.legacyBucketReader -
         Read access to buckets with object listing. Equivalent to an ACL entry
         on a bucket with the READER role.   - roles/storage.legacyBucketWriter
-        \u2014 Read access to buckets with object listing/creation/deletion.
+        - Read access to buckets with object listing/creation/deletion.
         Equivalent to an ACL entry on a bucket with the WRITER role.   -
-        roles/storage.legacyBucketOwner \u2014 Read and write access to existing
+        roles/storage.legacyBucketOwner - Read and write access to existing
         buckets with object listing/creation/deletion. Equivalent to an ACL
         entry on a bucket with the OWNER role.
     """
@@ -786,7 +787,7 @@
 
 
 class RewriteResponse(_messages.Message):
-  """A rewrite response.
+  r"""A rewrite response.
 
   Fields:
     done: true if the copy is finished; otherwise, false if the copy is in
@@ -813,7 +814,7 @@
 
 
 class StandardQueryParameters(_messages.Message):
-  """Query parameters accepted by all methods.
+  r"""Query parameters accepted by all methods.
 
   Enums:
     AltValueValuesEnum: Data format for the response.
@@ -836,7 +837,7 @@
   """
 
   class AltValueValuesEnum(_messages.Enum):
-    """Data format for the response.
+    r"""Data format for the response.
 
     Values:
       json: Responses with Content-Type of application/json
@@ -854,7 +855,7 @@
 
 
 class StorageBucketAccessControlsDeleteRequest(_messages.Message):
-  """A StorageBucketAccessControlsDeleteRequest object.
+  r"""A StorageBucketAccessControlsDeleteRequest object.
 
   Fields:
     bucket: Name of a bucket.
@@ -868,11 +869,11 @@
 
 
 class StorageBucketAccessControlsDeleteResponse(_messages.Message):
-  """An empty StorageBucketAccessControlsDelete response."""
+  r"""An empty StorageBucketAccessControlsDelete response."""
 
 
 class StorageBucketAccessControlsGetRequest(_messages.Message):
-  """A StorageBucketAccessControlsGetRequest object.
+  r"""A StorageBucketAccessControlsGetRequest object.
 
   Fields:
     bucket: Name of a bucket.
@@ -886,7 +887,7 @@
 
 
 class StorageBucketAccessControlsListRequest(_messages.Message):
-  """A StorageBucketAccessControlsListRequest object.
+  r"""A StorageBucketAccessControlsListRequest object.
 
   Fields:
     bucket: Name of a bucket.
@@ -896,7 +897,7 @@
 
 
 class StorageBucketsDeleteRequest(_messages.Message):
-  """A StorageBucketsDeleteRequest object.
+  r"""A StorageBucketsDeleteRequest object.
 
   Fields:
     bucket: Name of a bucket.
@@ -912,11 +913,11 @@
 
 
 class StorageBucketsDeleteResponse(_messages.Message):
-  """An empty StorageBucketsDelete response."""
+  r"""An empty StorageBucketsDelete response."""
 
 
 class StorageBucketsGetIamPolicyRequest(_messages.Message):
-  """A StorageBucketsGetIamPolicyRequest object.
+  r"""A StorageBucketsGetIamPolicyRequest object.
 
   Fields:
     bucket: Name of a bucket.
@@ -926,7 +927,7 @@
 
 
 class StorageBucketsGetRequest(_messages.Message):
-  """A StorageBucketsGetRequest object.
+  r"""A StorageBucketsGetRequest object.
 
   Enums:
     ProjectionValueValuesEnum: Set of properties to return. Defaults to noAcl.
@@ -942,7 +943,7 @@
   """
 
   class ProjectionValueValuesEnum(_messages.Enum):
-    """Set of properties to return. Defaults to noAcl.
+    r"""Set of properties to return. Defaults to noAcl.
 
     Values:
       full: Include all properties.
@@ -958,7 +959,7 @@
 
 
 class StorageBucketsInsertRequest(_messages.Message):
-  """A StorageBucketsInsertRequest object.
+  r"""A StorageBucketsInsertRequest object.
 
   Enums:
     PredefinedAclValueValuesEnum: Apply a predefined set of access controls to
@@ -981,7 +982,7 @@
   """
 
   class PredefinedAclValueValuesEnum(_messages.Enum):
-    """Apply a predefined set of access controls to this bucket.
+    r"""Apply a predefined set of access controls to this bucket.
 
     Values:
       authenticatedRead: Project team owners get OWNER access, and
@@ -1001,7 +1002,7 @@
     publicReadWrite = 4
 
   class PredefinedDefaultObjectAclValueValuesEnum(_messages.Enum):
-    """Apply a predefined set of default object access controls to this
+    r"""Apply a predefined set of default object access controls to this
     bucket.
 
     Values:
@@ -1025,7 +1026,7 @@
     publicRead = 5
 
   class ProjectionValueValuesEnum(_messages.Enum):
-    """Set of properties to return. Defaults to noAcl, unless the bucket
+    r"""Set of properties to return. Defaults to noAcl, unless the bucket
     resource specifies acl or defaultObjectAcl properties, when it defaults to
     full.
 
@@ -1044,7 +1045,7 @@
 
 
 class StorageBucketsListRequest(_messages.Message):
-  """A StorageBucketsListRequest object.
+  r"""A StorageBucketsListRequest object.
 
   Enums:
     ProjectionValueValuesEnum: Set of properties to return. Defaults to noAcl.
@@ -1059,7 +1060,7 @@
   """
 
   class ProjectionValueValuesEnum(_messages.Enum):
-    """Set of properties to return. Defaults to noAcl.
+    r"""Set of properties to return. Defaults to noAcl.
 
     Values:
       full: Include all properties.
@@ -1076,7 +1077,7 @@
 
 
 class StorageBucketsPatchRequest(_messages.Message):
-  """A StorageBucketsPatchRequest object.
+  r"""A StorageBucketsPatchRequest object.
 
   Enums:
     PredefinedAclValueValuesEnum: Apply a predefined set of access controls to
@@ -1100,7 +1101,7 @@
   """
 
   class PredefinedAclValueValuesEnum(_messages.Enum):
-    """Apply a predefined set of access controls to this bucket.
+    r"""Apply a predefined set of access controls to this bucket.
 
     Values:
       authenticatedRead: Project team owners get OWNER access, and
@@ -1120,7 +1121,7 @@
     publicReadWrite = 4
 
   class PredefinedDefaultObjectAclValueValuesEnum(_messages.Enum):
-    """Apply a predefined set of default object access controls to this
+    r"""Apply a predefined set of default object access controls to this
     bucket.
 
     Values:
@@ -1144,7 +1145,7 @@
     publicRead = 5
 
   class ProjectionValueValuesEnum(_messages.Enum):
-    """Set of properties to return. Defaults to full.
+    r"""Set of properties to return. Defaults to full.
 
     Values:
       full: Include all properties.
@@ -1163,7 +1164,7 @@
 
 
 class StorageBucketsSetIamPolicyRequest(_messages.Message):
-  """A StorageBucketsSetIamPolicyRequest object.
+  r"""A StorageBucketsSetIamPolicyRequest object.
 
   Fields:
     bucket: Name of a bucket.
@@ -1175,7 +1176,7 @@
 
 
 class StorageBucketsTestIamPermissionsRequest(_messages.Message):
-  """A StorageBucketsTestIamPermissionsRequest object.
+  r"""A StorageBucketsTestIamPermissionsRequest object.
 
   Fields:
     bucket: Name of a bucket.
@@ -1187,7 +1188,7 @@
 
 
 class StorageBucketsUpdateRequest(_messages.Message):
-  """A StorageBucketsUpdateRequest object.
+  r"""A StorageBucketsUpdateRequest object.
 
   Enums:
     PredefinedAclValueValuesEnum: Apply a predefined set of access controls to
@@ -1211,7 +1212,7 @@
   """
 
   class PredefinedAclValueValuesEnum(_messages.Enum):
-    """Apply a predefined set of access controls to this bucket.
+    r"""Apply a predefined set of access controls to this bucket.
 
     Values:
       authenticatedRead: Project team owners get OWNER access, and
@@ -1231,7 +1232,7 @@
     publicReadWrite = 4
 
   class PredefinedDefaultObjectAclValueValuesEnum(_messages.Enum):
-    """Apply a predefined set of default object access controls to this
+    r"""Apply a predefined set of default object access controls to this
     bucket.
 
     Values:
@@ -1255,7 +1256,7 @@
     publicRead = 5
 
   class ProjectionValueValuesEnum(_messages.Enum):
-    """Set of properties to return. Defaults to full.
+    r"""Set of properties to return. Defaults to full.
 
     Values:
       full: Include all properties.
@@ -1274,11 +1275,11 @@
 
 
 class StorageChannelsStopResponse(_messages.Message):
-  """An empty StorageChannelsStop response."""
+  r"""An empty StorageChannelsStop response."""
 
 
 class StorageDefaultObjectAccessControlsDeleteRequest(_messages.Message):
-  """A StorageDefaultObjectAccessControlsDeleteRequest object.
+  r"""A StorageDefaultObjectAccessControlsDeleteRequest object.
 
   Fields:
     bucket: Name of a bucket.
@@ -1292,11 +1293,11 @@
 
 
 class StorageDefaultObjectAccessControlsDeleteResponse(_messages.Message):
-  """An empty StorageDefaultObjectAccessControlsDelete response."""
+  r"""An empty StorageDefaultObjectAccessControlsDelete response."""
 
 
 class StorageDefaultObjectAccessControlsGetRequest(_messages.Message):
-  """A StorageDefaultObjectAccessControlsGetRequest object.
+  r"""A StorageDefaultObjectAccessControlsGetRequest object.
 
   Fields:
     bucket: Name of a bucket.
@@ -1310,7 +1311,7 @@
 
 
 class StorageDefaultObjectAccessControlsListRequest(_messages.Message):
-  """A StorageDefaultObjectAccessControlsListRequest object.
+  r"""A StorageDefaultObjectAccessControlsListRequest object.
 
   Fields:
     bucket: Name of a bucket.
@@ -1326,7 +1327,7 @@
 
 
 class StorageNotificationsDeleteRequest(_messages.Message):
-  """A StorageNotificationsDeleteRequest object.
+  r"""A StorageNotificationsDeleteRequest object.
 
   Fields:
     notification: ID of the notification to delete.
@@ -1336,11 +1337,11 @@
 
 
 class StorageNotificationsDeleteResponse(_messages.Message):
-  """An empty StorageNotificationsDelete response."""
+  r"""An empty StorageNotificationsDelete response."""
 
 
 class StorageNotificationsGetRequest(_messages.Message):
-  """A StorageNotificationsGetRequest object.
+  r"""A StorageNotificationsGetRequest object.
 
   Fields:
     notification: Notification ID
@@ -1350,7 +1351,7 @@
 
 
 class StorageNotificationsListRequest(_messages.Message):
-  """A StorageNotificationsListRequest object.
+  r"""A StorageNotificationsListRequest object.
 
   Fields:
     bucket: Name of a GCS bucket.
@@ -1360,7 +1361,7 @@
 
 
 class StorageObjectAccessControlsDeleteRequest(_messages.Message):
-  """A StorageObjectAccessControlsDeleteRequest object.
+  r"""A StorageObjectAccessControlsDeleteRequest object.
 
   Fields:
     bucket: Name of a bucket.
@@ -1380,11 +1381,11 @@
 
 
 class StorageObjectAccessControlsDeleteResponse(_messages.Message):
-  """An empty StorageObjectAccessControlsDelete response."""
+  r"""An empty StorageObjectAccessControlsDelete response."""
 
 
 class StorageObjectAccessControlsGetRequest(_messages.Message):
-  """A StorageObjectAccessControlsGetRequest object.
+  r"""A StorageObjectAccessControlsGetRequest object.
 
   Fields:
     bucket: Name of a bucket.
@@ -1404,7 +1405,7 @@
 
 
 class StorageObjectAccessControlsInsertRequest(_messages.Message):
-  """A StorageObjectAccessControlsInsertRequest object.
+  r"""A StorageObjectAccessControlsInsertRequest object.
 
   Fields:
     bucket: Name of a bucket.
@@ -1423,7 +1424,7 @@
 
 
 class StorageObjectAccessControlsListRequest(_messages.Message):
-  """A StorageObjectAccessControlsListRequest object.
+  r"""A StorageObjectAccessControlsListRequest object.
 
   Fields:
     bucket: Name of a bucket.
@@ -1439,7 +1440,7 @@
 
 
 class StorageObjectAccessControlsPatchRequest(_messages.Message):
-  """A StorageObjectAccessControlsPatchRequest object.
+  r"""A StorageObjectAccessControlsPatchRequest object.
 
   Fields:
     bucket: Name of a bucket.
@@ -1462,7 +1463,7 @@
 
 
 class StorageObjectAccessControlsUpdateRequest(_messages.Message):
-  """A StorageObjectAccessControlsUpdateRequest object.
+  r"""A StorageObjectAccessControlsUpdateRequest object.
 
   Fields:
     bucket: Name of a bucket.
@@ -1485,7 +1486,7 @@
 
 
 class StorageObjectsComposeRequest(_messages.Message):
-  """A StorageObjectsComposeRequest object.
+  r"""A StorageObjectsComposeRequest object.
 
   Enums:
     DestinationPredefinedAclValueValuesEnum: Apply a predefined set of access
@@ -1506,7 +1507,7 @@
   """
 
   class DestinationPredefinedAclValueValuesEnum(_messages.Enum):
-    """Apply a predefined set of access controls to the destination object.
+    r"""Apply a predefined set of access controls to the destination object.
 
     Values:
       authenticatedRead: Object owner gets OWNER access, and
@@ -1537,7 +1538,7 @@
 
 
 class StorageObjectsCopyRequest(_messages.Message):
-  """A StorageObjectsCopyRequest object.
+  r"""A StorageObjectsCopyRequest object.
 
   Enums:
     DestinationPredefinedAclValueValuesEnum: Apply a predefined set of access
@@ -1585,7 +1586,7 @@
   """
 
   class DestinationPredefinedAclValueValuesEnum(_messages.Enum):
-    """Apply a predefined set of access controls to the destination object.
+    r"""Apply a predefined set of access controls to the destination object.
 
     Values:
       authenticatedRead: Object owner gets OWNER access, and
@@ -1608,7 +1609,7 @@
     publicRead = 5
 
   class ProjectionValueValuesEnum(_messages.Enum):
-    """Set of properties to return. Defaults to noAcl, unless the object
+    r"""Set of properties to return. Defaults to noAcl, unless the object
     resource specifies the acl property, when it defaults to full.
 
     Values:
@@ -1637,7 +1638,7 @@
 
 
 class StorageObjectsDeleteRequest(_messages.Message):
-  """A StorageObjectsDeleteRequest object.
+  r"""A StorageObjectsDeleteRequest object.
 
   Fields:
     bucket: Name of the bucket in which the object resides.
@@ -1665,11 +1666,11 @@
 
 
 class StorageObjectsDeleteResponse(_messages.Message):
-  """An empty StorageObjectsDelete response."""
+  r"""An empty StorageObjectsDelete response."""
 
 
 class StorageObjectsGetIamPolicyRequest(_messages.Message):
-  """A StorageObjectsGetIamPolicyRequest object.
+  r"""A StorageObjectsGetIamPolicyRequest object.
 
   Fields:
     bucket: Name of the bucket in which the object resides.
@@ -1685,7 +1686,7 @@
 
 
 class StorageObjectsGetRequest(_messages.Message):
-  """A StorageObjectsGetRequest object.
+  r"""A StorageObjectsGetRequest object.
 
   Enums:
     ProjectionValueValuesEnum: Set of properties to return. Defaults to noAcl.
@@ -1708,7 +1709,7 @@
   """
 
   class ProjectionValueValuesEnum(_messages.Enum):
-    """Set of properties to return. Defaults to noAcl.
+    r"""Set of properties to return. Defaults to noAcl.
 
     Values:
       full: Include all properties.
@@ -1728,7 +1729,7 @@
 
 
 class StorageObjectsInsertRequest(_messages.Message):
-  """A StorageObjectsInsertRequest object.
+  r"""A StorageObjectsInsertRequest object.
 
   Enums:
     PredefinedAclValueValuesEnum: Apply a predefined set of access controls to
@@ -1764,7 +1765,7 @@
   """
 
   class PredefinedAclValueValuesEnum(_messages.Enum):
-    """Apply a predefined set of access controls to this object.
+    r"""Apply a predefined set of access controls to this object.
 
     Values:
       authenticatedRead: Object owner gets OWNER access, and
@@ -1787,7 +1788,7 @@
     publicRead = 5
 
   class ProjectionValueValuesEnum(_messages.Enum):
-    """Set of properties to return. Defaults to noAcl, unless the object
+    r"""Set of properties to return. Defaults to noAcl, unless the object
     resource specifies the acl property, when it defaults to full.
 
     Values:
@@ -1810,7 +1811,7 @@
 
 
 class StorageObjectsListRequest(_messages.Message):
-  """A StorageObjectsListRequest object.
+  r"""A StorageObjectsListRequest object.
 
   Enums:
     ProjectionValueValuesEnum: Set of properties to return. Defaults to noAcl.
@@ -1834,7 +1835,7 @@
   """
 
   class ProjectionValueValuesEnum(_messages.Enum):
-    """Set of properties to return. Defaults to noAcl.
+    r"""Set of properties to return. Defaults to noAcl.
 
     Values:
       full: Include all properties.
@@ -1853,7 +1854,7 @@
 
 
 class StorageObjectsPatchRequest(_messages.Message):
-  """A StorageObjectsPatchRequest object.
+  r"""A StorageObjectsPatchRequest object.
 
   Enums:
     PredefinedAclValueValuesEnum: Apply a predefined set of access controls to
@@ -1880,7 +1881,7 @@
   """
 
   class PredefinedAclValueValuesEnum(_messages.Enum):
-    """Apply a predefined set of access controls to this object.
+    r"""Apply a predefined set of access controls to this object.
 
     Values:
       authenticatedRead: Object owner gets OWNER access, and
@@ -1903,7 +1904,7 @@
     publicRead = 5
 
   class ProjectionValueValuesEnum(_messages.Enum):
-    """Set of properties to return. Defaults to full.
+    r"""Set of properties to return. Defaults to full.
 
     Values:
       full: Include all properties.
@@ -1925,7 +1926,7 @@
 
 
 class StorageObjectsRewriteRequest(_messages.Message):
-  """A StorageObjectsRewriteRequest object.
+  r"""A StorageObjectsRewriteRequest object.
 
   Enums:
     DestinationPredefinedAclValueValuesEnum: Apply a predefined set of access
@@ -1985,7 +1986,7 @@
   """
 
   class DestinationPredefinedAclValueValuesEnum(_messages.Enum):
-    """Apply a predefined set of access controls to the destination object.
+    r"""Apply a predefined set of access controls to the destination object.
 
     Values:
       authenticatedRead: Object owner gets OWNER access, and
@@ -2008,7 +2009,7 @@
     publicRead = 5
 
   class ProjectionValueValuesEnum(_messages.Enum):
-    """Set of properties to return. Defaults to noAcl, unless the object
+    r"""Set of properties to return. Defaults to noAcl, unless the object
     resource specifies the acl property, when it defaults to full.
 
     Values:
@@ -2039,7 +2040,7 @@
 
 
 class StorageObjectsSetIamPolicyRequest(_messages.Message):
-  """A StorageObjectsSetIamPolicyRequest object.
+  r"""A StorageObjectsSetIamPolicyRequest object.
 
   Fields:
     bucket: Name of the bucket in which the object resides.
@@ -2057,7 +2058,7 @@
 
 
 class StorageObjectsTestIamPermissionsRequest(_messages.Message):
-  """A StorageObjectsTestIamPermissionsRequest object.
+  r"""A StorageObjectsTestIamPermissionsRequest object.
 
   Fields:
     bucket: Name of the bucket in which the object resides.
@@ -2075,7 +2076,7 @@
 
 
 class StorageObjectsUpdateRequest(_messages.Message):
-  """A StorageObjectsUpdateRequest object.
+  r"""A StorageObjectsUpdateRequest object.
 
   Enums:
     PredefinedAclValueValuesEnum: Apply a predefined set of access controls to
@@ -2102,7 +2103,7 @@
   """
 
   class PredefinedAclValueValuesEnum(_messages.Enum):
-    """Apply a predefined set of access controls to this object.
+    r"""Apply a predefined set of access controls to this object.
 
     Values:
       authenticatedRead: Object owner gets OWNER access, and
@@ -2125,7 +2126,7 @@
     publicRead = 5
 
   class ProjectionValueValuesEnum(_messages.Enum):
-    """Set of properties to return. Defaults to full.
+    r"""Set of properties to return. Defaults to full.
 
     Values:
       full: Include all properties.
@@ -2147,7 +2148,7 @@
 
 
 class StorageObjectsWatchAllRequest(_messages.Message):
-  """A StorageObjectsWatchAllRequest object.
+  r"""A StorageObjectsWatchAllRequest object.
 
   Enums:
     ProjectionValueValuesEnum: Set of properties to return. Defaults to noAcl.
@@ -2172,7 +2173,7 @@
   """
 
   class ProjectionValueValuesEnum(_messages.Enum):
-    """Set of properties to return. Defaults to noAcl.
+    r"""Set of properties to return. Defaults to noAcl.
 
     Values:
       full: Include all properties.
@@ -2192,24 +2193,24 @@
 
 
 class TestIamPermissionsResponse(_messages.Message):
-  """A storage.(buckets|objects).testIamPermissions response.
+  r"""A storage.(buckets|objects).testIamPermissions response.
 
   Fields:
     kind: The kind of item this is.
     permissions: The permissions held by the caller. Permissions are always of
       the format storage.resource.capability, where resource is one of buckets
       or objects. The supported permissions are as follows:   -
-      storage.buckets.delete \u2014 Delete bucket.   - storage.buckets.get \u2014 Read
-      bucket metadata.   - storage.buckets.getIamPolicy \u2014 Read bucket IAM
-      policy.   - storage.buckets.create \u2014 Create bucket.   -
-      storage.buckets.list \u2014 List buckets.   - storage.buckets.setIamPolicy \u2014
-      Update bucket IAM policy.   - storage.buckets.update \u2014 Update bucket
-      metadata.   - storage.objects.delete \u2014 Delete object.   -
-      storage.objects.get \u2014 Read object data and metadata.   -
-      storage.objects.getIamPolicy \u2014 Read object IAM policy.   -
-      storage.objects.create \u2014 Create object.   - storage.objects.list \u2014 List
-      objects.   - storage.objects.setIamPolicy \u2014 Update object IAM policy.
-      - storage.objects.update \u2014 Update object metadata.
+      storage.buckets.delete - Delete bucket.   - storage.buckets.get - Read
+      bucket metadata.   - storage.buckets.getIamPolicy - Read bucket IAM
+      policy.   - storage.buckets.create - Create bucket.   -
+      storage.buckets.list - List buckets.   - storage.buckets.setIamPolicy -
+      Update bucket IAM policy.   - storage.buckets.update - Update bucket
+      metadata.   - storage.objects.delete - Delete object.   -
+      storage.objects.get - Read object data and metadata.   -
+      storage.objects.getIamPolicy - Read object IAM policy.   -
+      storage.objects.create - Create object.   - storage.objects.list - List
+      objects.   - storage.objects.setIamPolicy - Update object IAM policy.
+      - storage.objects.update - Update object metadata.
   """
 
   kind = _messages.StringField(1, default=u'storage#testIamPermissionsResponse')
diff --git a/samples/storage_sample/uploads_test.py b/samples/storage_sample/uploads_test.py
index cfe6aaa..936fe43 100644
--- a/samples/storage_sample/uploads_test.py
+++ b/samples/storage_sample/uploads_test.py
@@ -27,7 +27,7 @@
 
 import six
 
-import apitools.base.py as apitools_base
+from apitools.base.py import transfer
 import storage
 
 _CLIENT = None
@@ -148,12 +148,12 @@
         # Pretend the process died, and resume with a new attempt at the
         # same upload.
         upload_data = json.dumps(self.__upload.serialization_data)
-        second_upload_attempt = apitools_base.Upload.FromData(
+        second_upload_attempt = transfer.Upload.FromData(
             self.__buffer, upload_data, self.__upload.http)
         second_upload_attempt._Upload__SendChunk(0)
         self.assertEqual(second_upload_attempt.chunksize, self.__buffer.tell())
         # Simulate a third try, and stream from there.
-        final_upload_attempt = apitools_base.Upload.FromData(
+        final_upload_attempt = transfer.Upload.FromData(
             self.__buffer, upload_data, self.__upload.http)
         final_upload_attempt.StreamInChunks()
         self.assertEqual(size, self.__buffer.tell())
@@ -161,7 +161,7 @@
         object_info = self.__client.objects.Get(self.__GetRequest(filename))
         self.assertEqual(size, object_info.size)
         # Confirm that a new attempt successfully does nothing.
-        completed_upload_attempt = apitools_base.Upload.FromData(
+        completed_upload_attempt = transfer.Upload.FromData(
             self.__buffer, upload_data, self.__upload.http)
         self.assertTrue(completed_upload_attempt.complete)
         completed_upload_attempt.StreamInChunks()
diff --git a/samples/uptodate_check_test.py b/samples/uptodate_check_test.py
index 6fbea9c..3871695 100644
--- a/samples/uptodate_check_test.py
+++ b/samples/uptodate_check_test.py
@@ -15,6 +15,7 @@
 import os
 import difflib
 
+import six
 import unittest2
 
 from apitools.gen import gen_client
@@ -30,7 +31,6 @@
         return f.read()
 
 
-@test_utils.RunOnlyOnPython27
 class ClientGenCliTest(unittest2.TestCase):
 
     def AssertDiffEqual(self, expected, actual):
@@ -45,7 +45,6 @@
         with test_utils.TempDir() as tmp_dir_path:
             gen_client.main([
                 gen_client.__file__,
-                '--generate_cli',
                 '--init-file', 'empty',
                 '--infile',
                 GetSampleClientPath(api_name, prefix + '.json'),
@@ -56,11 +55,14 @@
                 'client'
             ])
             expected_files = (
-                set([prefix + '.py']) |  # CLI files
                 set([prefix + '_client.py',
                      prefix + '_messages.py',
                      '__init__.py']))
             self.assertEquals(expected_files, set(os.listdir(tmp_dir_path)))
+            if six.PY3:
+                # The source files won't be identical under python3,
+                # so we exit early.
+                return
             for expected_file in expected_files:
                 self.AssertDiffEqual(
                     _GetContent(GetSampleClientPath(
diff --git a/setup.cfg b/setup.cfg
new file mode 100644
index 0000000..a5ba8ba
--- /dev/null
+++ b/setup.cfg
@@ -0,0 +1,3 @@
+[pycodestyle]
+count = False
+ignore = E722,E741,W504
diff --git a/setup.py b/setup.py
index 9667697..fbf81b1 100644
--- a/setup.py
+++ b/setup.py
@@ -29,17 +29,16 @@
 # Python version and OS.
 REQUIRED_PACKAGES = [
     'httplib2>=0.8',
-    'oauth2client>=1.5.2,<4.0.0dev',
-    'six>=1.9.0',
+    'fasteners>=0.14',
+    'oauth2client>=1.4.12',
+    'six>=1.12.0',
     ]
 
 CLI_PACKAGES = [
-    'google-apputils>=0.4.0',
-    'python-gflags==3.0.6',  # Starting version 3.0.7 py26 is not supported.
+    'python-gflags>=3.0.6',
 ]
 
 TESTING_PACKAGES = [
-    'google-apputils>=0.4.0',
     'unittest2>=0.5.1',
     'mock>=1.0.1',
 ]
@@ -50,10 +49,7 @@
 
 py_version = platform.python_version()
 
-if py_version < '2.7':
-    REQUIRED_PACKAGES.append('argparse>=1.2.1')
-
-_APITOOLS_VERSION = '0.5.11'
+_APITOOLS_VERSION = '0.5.30'
 
 with open('README.rst') as fileobj:
     README = fileobj.read()
@@ -63,11 +59,11 @@
     version=_APITOOLS_VERSION,
     description='client libraries for humans',
     long_description=README,
-    url='http://github.com/craigcitro/apitools',
+    url='http://github.com/google/apitools',
     author='Craig Citro',
     author_email='craigcitro@google.com',
     # Contained modules and scripts.
-    packages=setuptools.find_packages(),
+    packages=setuptools.find_packages(include=['apitools']),
     entry_points={'console_scripts': CONSOLE_SCRIPTS},
     install_requires=REQUIRED_PACKAGES,
     tests_require=REQUIRED_PACKAGES + CLI_PACKAGES + TESTING_PACKAGES,
@@ -80,6 +76,15 @@
     package_data={
         'apitools.data': ['*'],
     },
+    exclude_package_data={
+        '': [
+            '*_test.py',
+            '*/testing/*',
+            '*/testdata/*',
+            'base/protorpclite/test_util.py',
+            'gen/test_utils.py',
+        ],
+    },
     # PyPI package information.
     classifiers=[
         'License :: OSI Approved :: Apache Software License',
diff --git a/tox.ini b/tox.ini
index e2d1f23..aaa22e0 100644
--- a/tox.ini
+++ b/tox.ini
@@ -1,50 +1,32 @@
 [tox]
-envlist = py26,py27,pypy,py34,py35,lint,cover,py27oldoauth2client
+envlist =
+    py26-oauth2client4
+    py27-oauth2client{1,2,3,4}
+    py33-oauth2client41
+    py34-oauth2client41
+    py35-oauth2client{1,2,3,4}
 
 [testenv]
 deps =
     nose
-    python-gflags==3.0.6
+    python-gflags
+    oauth2client1: oauth2client<1.5dev
+    oauth2client2: oauth2client>=2,<=3dev
+    oauth2client3: oauth2client>=3,<=4dev
+    oauth2client4: oauth2client>=4,<=5dev
 commands =
     pip install google-apitools[testing]
     nosetests []
 passenv = TRAVIS*
 
-[testenv:py27oldoauth2client]
-commands =
-    pip install oauth2client==1.5.2
-    {[testenv]commands}
-deps = {[testenv]deps}
-
-[testenv:py34]
-basepython = python3.4
-deps =
-    mock
-    nose
-    unittest2
-commands = nosetests []
-
-[testenv:py35]
-basepython = python3.5
-deps =
-    mock
-    nose
-    unittest2
-commands = nosetests []
-
-[pep8]
-exclude = samples/*_sample/*/*,*/testdata/*,*.egg/,*.egg-info/,.*/,ez_setup.py,build
-verbose = 1
-
 [testenv:lint]
 basepython =
     python2.7
 commands =
-    pip install six google-apitools[testing]
-    pep8
-    python run_pylint.py
+    pip install six google-apitools
+    pycodestyle apitools
 deps =
-    pep8
+    pycodestyle==2.4.0
     pylint
     unittest2
 
@@ -54,8 +36,7 @@
 commands =
     nosetests --with-xunit --with-xcoverage --cover-package=apitools --nocapture --cover-erase --cover-tests --cover-branches []
 deps =
-    google-apputils
-    python-gflags==3.0.6
+    python-gflags
     mock
     nose
     unittest2