Revert "Revert "Upgrade to 5.0.71.48"" DO NOT MERGE

This reverts commit f2e3994fa5148cc3d9946666f0b0596290192b0e,
and updates the x64 makefile properly so it doesn't break that
build.

FPIIM-449

Change-Id: Ib83e35bfbae6af627451c926a9650ec57c045605
(cherry picked from commit 109988c7ccb6f3fd1a58574fa3dfb88beaef6632)
diff --git a/build/android/gyp/aidl.py b/build/android/gyp/aidl.py
new file mode 100755
index 0000000..85ceeae
--- /dev/null
+++ b/build/android/gyp/aidl.py
@@ -0,0 +1,62 @@
+#!/usr/bin/env python
+#
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Invokes Android's aidl
+"""
+
+import optparse
+import os
+import re
+import sys
+import zipfile
+
+from util import build_utils
+
+
def main(argv):
  """Runs aidl over each input .aidl file and packs the output in a srcjar."""
  parser = optparse.OptionParser()
  build_utils.AddDepfileOption(parser)
  parser.add_option('--aidl-path', help='Path to the aidl binary.')
  parser.add_option('--imports', help='Files to import.')
  parser.add_option('--includes',
                    help='Directories to add as import search paths.')
  parser.add_option('--srcjar', help='Path for srcjar output.')
  options, args = parser.parse_args(argv[1:])

  with build_utils.TempDir() as temp_dir:
    # Generate one .java file per .aidl input into the temp dir.
    for aidl_file in args:
      classname = os.path.splitext(os.path.basename(aidl_file))[0]
      java_out = os.path.join(temp_dir, classname + '.java')
      cmd = [options.aidl_path]
      cmd.extend('-p' + p for p in build_utils.ParseGypList(options.imports))
      if options.includes is not None:
        cmd.extend(
            '-I' + d for d in build_utils.ParseGypList(options.includes))
      cmd.extend([aidl_file, java_out])
      build_utils.CheckOutput(cmd)

    # Pack the generated sources into the srcjar, laid out by Java package.
    with zipfile.ZipFile(options.srcjar, 'w') as srcjar:
      for java_path in build_utils.FindInDirectory(temp_dir, '*.java'):
        with open(java_path) as src:
          contents = src.read()
        # NOTE(review): assumes every generated file declares a package;
        # re.search would return None otherwise -- confirm aidl guarantees it.
        package = re.search(
            r'^\s*package\s+(.*?)\s*;', contents, re.M).group(1)
        arcname = '%s/%s' % (package.replace('.', '/'),
                             os.path.basename(java_path))
        srcjar.writestr(arcname, contents)

  if options.depfile:
    build_utils.WriteDepfile(
        options.depfile,
        build_utils.GetPythonDependencies())


if __name__ == '__main__':
  sys.exit(main(sys.argv))
diff --git a/build/android/gyp/ant.py b/build/android/gyp/ant.py
new file mode 100755
index 0000000..5394b9e
--- /dev/null
+++ b/build/android/gyp/ant.py
@@ -0,0 +1,65 @@
+#!/usr/bin/env python
+#
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""An Ant wrapper that suppresses useless Ant output.
+
+Ant build scripts output "BUILD SUCCESSFUL" and build timing at the end of
+every build. In the Android build, this just adds a lot of useless noise to the
+build output. This script forwards its arguments to ant, and prints Ant's
+output up until the BUILD SUCCESSFUL line.
+
+Also, when a command fails, this script will re-run that ant command with the
+'-verbose' argument so that the failure is easier to debug.
+"""
+
+import optparse
+import sys
+import traceback
+
+from util import build_utils
+
+
def main(argv):
  """Runs `ant` with the given args, suppressing noisy success output.

  On failure, re-runs the same ant command with '-verbose' so the cause
  is easier to identify, then exits non-zero if that also fails.
  """
  option_parser = optparse.OptionParser()
  build_utils.AddDepfileOption(option_parser)
  options, args = option_parser.parse_args(argv[1:])

  try:
    stdout = build_utils.CheckOutput(['ant'] + args)
  except build_utils.CalledProcessError:
    # It is very difficult to diagnose ant failures without the '-verbose'
    # argument. So, when an ant command fails, re-run it with '-verbose' so
    # that the cause of the failure is easier to identify.
    verbose_args = ['-verbose'] + [a for a in args if a != '-quiet']
    try:
      stdout = build_utils.CheckOutput(['ant'] + verbose_args)
    except build_utils.CalledProcessError:
      traceback.print_exc()
      sys.exit(1)

    # If this did sys.exit(1), building again would succeed (which would be
    # awkward). Instead, just print a big warning.
    # (Fixed: the original message ran 'failed.' and 'But' together.)
    build_utils.PrintBigWarning(
        'This is unexpected. `ant ' + ' '.join(args) + '` failed. ' +
        'But, running `ant ' + ' '.join(verbose_args) + '` passed.')

  # Echo ant's output only up to (not including) the BUILD SUCCESSFUL
  # banner; everything after it is just timing noise.
  for line in stdout.strip().split('\n'):
    if line.strip() == 'BUILD SUCCESSFUL':
      break
    # print() with a single argument behaves identically under Python 2 and
    # 3; the original used a Python-2-only print statement.
    print(line)

  if options.depfile:
    # The depfile must also cover the ant buildfile itself.
    assert '-buildfile' in args
    ant_buildfile = args[args.index('-buildfile') + 1]

    build_utils.WriteDepfile(
        options.depfile,
        [ant_buildfile] + build_utils.GetPythonDependencies())


if __name__ == '__main__':
  sys.exit(main(sys.argv))
diff --git a/build/android/gyp/apk_install.py b/build/android/gyp/apk_install.py
new file mode 100755
index 0000000..9c90763
--- /dev/null
+++ b/build/android/gyp/apk_install.py
@@ -0,0 +1,125 @@
+#!/usr/bin/env python
+#
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Installs an APK.
+
+"""
+
+import optparse
+import os
+import re
+import sys
+
+from util import build_device
+from util import build_utils
+from util import md5_check
+
+BUILD_ANDROID_DIR = os.path.abspath(
+    os.path.join(os.path.dirname(__file__), '..'))
+sys.path.append(BUILD_ANDROID_DIR)
+
+import devil_chromium
+from devil.android import apk_helper
+from pylib import constants
+
+
def GetNewMetadata(device, apk_package):
  """Gets the metadata on the device for the apk_package apk.

  Args:
    device: Device wrapper exposing RunShellCommand().
    apk_package: Package name, e.g. 'org.chromium.chrome'.

  Returns:
    The first matching 'ls -l' line, or None when the APK is not present.
  """
  output = device.RunShellCommand('ls -l /data/app/')
  # Matches lines like:
  # -rw-r--r-- system   system    7376582 2013-04-19 16:34 \
  # org.chromium.chrome.apk
  # -rw-r--r-- system   system    7376582 2013-04-19 16:34 \
  # org.chromium.chrome-1.apk
  #
  # re.escape() keeps the dots in the package name from matching arbitrary
  # characters, and r'\.apk' matches the literal extension; the original
  # pattern used unescaped dots, which also matched e.g. 'xapk'.
  pattern = re.compile(r'.*%s(-[0-9]*)?(\.apk)?$' % re.escape(apk_package))
  # Materialize the matches so indexing works under Python 3 too, where
  # filter() is lazy.
  matches = [line for line in output if pattern.match(line)]
  return matches[0] if matches else None
+
def HasInstallMetadataChanged(device, apk_package, metadata_path):
  """Returns True unless the locally recorded metadata matches the device.

  A missing local record counts as changed, since there is no proof the
  last install is still current.
  """
  if not os.path.exists(metadata_path):
    return True

  with open(metadata_path, 'r') as recorded:
    expected = recorded.read()
  actual = device.GetInstallMetadata(apk_package)
  return expected != actual
+
+
def RecordInstallMetadata(device, apk_package, metadata_path):
  """Snapshots the on-device metadata for apk_package into metadata_path."""
  metadata = GetNewMetadata(device, apk_package)
  if not metadata:
    # The APK must be visible on the device right after an install; if it
    # is not, something went wrong -- fail loudly.
    raise Exception('APK install failed unexpectedly.')

  with open(metadata_path, 'w') as outfile:
    outfile.write(metadata)
+
+
def main():
  """Installs an APK onto the configured build device, skipping when current."""
  parser = optparse.OptionParser()
  parser.add_option('--apk-path',
      help='Path to .apk to install.')
  parser.add_option('--split-apk-path',
      help='Path to .apk splits (can specify multiple times, causes '
      '--install-multiple to be used.',
      action='append')
  parser.add_option('--android-sdk-tools',
      help='Path to the Android SDK build tools folder. ' +
           'Required when using --split-apk-path.')
  parser.add_option('--install-record',
      help='Path to install record (touched only when APK is installed).')
  parser.add_option('--build-device-configuration',
      help='Path to build device configuration.')
  parser.add_option('--stamp',
      help='Path to touch on success.')
  parser.add_option('--configuration-name',
      help='The build CONFIGURATION_NAME')
  parser.add_option('--output-directory',
      help='The output directory.')
  options, _ = parser.parse_args()

  constants.SetBuildType(options.configuration_name)

  devil_chromium.Initialize(
      output_directory=os.path.abspath(options.output_directory))

  device = build_device.GetBuildDeviceFromPath(
      options.build_device_configuration)
  # No configured build device: do nothing (best-effort install step).
  if not device:
    return

  serial_number = device.GetSerialNumber()
  apk_package = apk_helper.GetPackageName(options.apk_path)

  # The metadata snapshot is keyed per-APK, per-device.
  metadata_path = '%s.%s.device.time.stamp' % (options.apk_path, serial_number)

  # If the APK on the device does not match the one that was last installed by
  # the build, then the APK has to be installed (regardless of the md5 record).
  force_install = HasInstallMetadataChanged(device, apk_package, metadata_path)


  def Install():
    # --split-apk-path switches to install-multiple semantics.
    if options.split_apk_path:
      device.InstallSplitApk(options.apk_path, options.split_apk_path)
    else:
      device.Install(options.apk_path, reinstall=True)

    RecordInstallMetadata(device, apk_package, metadata_path)
    build_utils.Touch(options.install_record)


  # Only run Install() when the APK's md5 has changed since the last
  # recorded install (or when force_install is set).
  record_path = '%s.%s.md5.stamp' % (options.apk_path, serial_number)
  md5_check.CallAndRecordIfStale(
      Install,
      record_path=record_path,
      input_paths=[options.apk_path],
      force=force_install)

  if options.stamp:
    build_utils.Touch(options.stamp)


if __name__ == '__main__':
  sys.exit(main())
diff --git a/build/android/gyp/apk_obfuscate.py b/build/android/gyp/apk_obfuscate.py
new file mode 100755
index 0000000..99b6176
--- /dev/null
+++ b/build/android/gyp/apk_obfuscate.py
@@ -0,0 +1,185 @@
+#!/usr/bin/env python
+#
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Generates the obfuscated jar and test jar for an apk.
+
+If proguard is not enabled or 'Release' is not in the configuration name,
+obfuscation will be a no-op.
+"""
+
+import json
+import optparse
+import os
+import sys
+import tempfile
+
+from util import build_utils
+from util import proguard_util
+
+
# ProGuard rule template: keep the named class (%s) and all of its members
# unobfuscated. Used below for classes listed in the main-dex list.
_PROGUARD_KEEP_CLASS = '''-keep class %s {
  *;
}
'''
+
+
def ParseArgs(argv):
  """Parses and validates command line options.

  Returns:
    (options, args) tuple; args is always empty since positional
    arguments are rejected.
  """
  parser = optparse.OptionParser()
  parser.add_option('--android-sdk', help='path to the Android SDK folder')
  parser.add_option('--android-sdk-tools',
                    help='path to the Android SDK build tools folder')
  parser.add_option('--android-sdk-jar',
                    help='path to Android SDK\'s android.jar')
  parser.add_option('--proguard-jar-path',
                    help='Path to proguard.jar in the sdk')
  parser.add_option('--input-jars-paths',
                    help='Path to jars to include in obfuscated jar')

  parser.add_option('--proguard-configs',
                    help='Paths to proguard config files')

  parser.add_option('--configuration-name',
                    help='Gyp configuration name (i.e. Debug, Release)')

  parser.add_option('--debug-build-proguard-enabled', action='store_true',
                    help='--proguard-enabled takes effect on release '
                         'build, this flag enable the proguard on debug '
                         'build.')
  parser.add_option('--proguard-enabled', action='store_true',
                    help='Set if proguard is enabled for this target.')

  parser.add_option('--obfuscated-jar-path',
                    help='Output path for obfuscated jar.')

  parser.add_option('--testapp', action='store_true',
                    help='Set this if building an instrumentation test apk')
  parser.add_option('--tested-apk-obfuscated-jar-path',
                    help='Path to obfusctated jar of the tested apk')
  parser.add_option('--test-jar-path',
                    help='Output path for jar containing all the test apk\'s '
                    'code.')

  parser.add_option('--stamp', help='File to touch on success')

  parser.add_option('--main-dex-list-path',
                    help='The list of classes to retain in the main dex. '
                         'These will not be obfuscated.')
  parser.add_option('--multidex-configuration-path',
                    help='A JSON file containing multidex build configuration.')
  parser.add_option('--verbose', '-v', action='store_true',
                    help='Print all proguard output')

  (options, args) = parser.parse_args(argv)

  if args:
    parser.error('No positional arguments should be given. ' + str(args))

  # Check that required options have been provided.
  required_options = (
      'android_sdk',
      'android_sdk_tools',
      'android_sdk_jar',
      'proguard_jar_path',
      'input_jars_paths',
      'configuration_name',
      'obfuscated_jar_path',
      )

  # Instrumentation test APKs additionally need a test jar output path.
  if options.testapp:
    required_options += (
        'test_jar_path',
        )

  build_utils.CheckOptions(options, parser, required=required_options)
  return options, args
+
+
def DoProguard(options):
  """Runs proguard over the input jars, producing the obfuscated jar."""
  proguard = proguard_util.ProguardCmdBuilder(options.proguard_jar_path)
  proguard.outjar(options.obfuscated_jar_path)

  input_jars = build_utils.ParseGypList(options.input_jars_paths)

  # Nothing is excluded at the moment; kept so the injars filter below
  # stays in place for future use.
  exclude_paths = []
  configs = build_utils.ParseGypList(options.proguard_configs)
  if options.tested_apk_obfuscated_jar_path:
    # configs should only contain the process_resources.py generated config.
    assert len(configs) == 1, (
        'test apks should not have custom proguard configs: ' + str(configs))
    proguard.tested_apk_info(options.tested_apk_obfuscated_jar_path + '.info')

  proguard.libraryjars([options.android_sdk_jar])
  proguard_injars = [p for p in input_jars if p not in exclude_paths]
  proguard.injars(proguard_injars)

  # A multidex build may contribute an extra generated config that keeps
  # main-dex classes unobfuscated (see _PossibleMultidexConfig).
  multidex_config = _PossibleMultidexConfig(options)
  if multidex_config:
    configs.append(multidex_config)

  proguard.configs(configs)
  proguard.verbose(options.verbose)
  proguard.CheckOutput()
+
+
+def _PossibleMultidexConfig(options):
+  if not options.multidex_configuration_path:
+    return None
+
+  with open(options.multidex_configuration_path) as multidex_config_file:
+    multidex_config = json.loads(multidex_config_file.read())
+
+  if not (multidex_config.get('enabled') and options.main_dex_list_path):
+    return None
+
+  main_dex_list_config = ''
+  with open(options.main_dex_list_path) as main_dex_list:
+    for clazz in (l.strip() for l in main_dex_list):
+      if clazz.endswith('.class'):
+        clazz = clazz[:-len('.class')]
+      clazz = clazz.replace('/', '.')
+      main_dex_list_config += (_PROGUARD_KEEP_CLASS % clazz)
+  with tempfile.NamedTemporaryFile(
+      delete=False,
+      dir=os.path.dirname(options.main_dex_list_path),
+      prefix='main_dex_list_proguard',
+      suffix='.flags') as main_dex_config_file:
+    main_dex_config_file.write(main_dex_list_config)
+  return main_dex_config_file.name
+
+
def main(argv):
  """Entry point: merges the test jar and obfuscates (or stubs outputs)."""
  options, _ = ParseArgs(argv)

  input_jars = build_utils.ParseGypList(options.input_jars_paths)

  if options.testapp:
    # The test jar gets everything except generated R/Manifest/BuildConfig
    # classes, which would collide with the tested APK's own copies.
    dependency_class_filters = [
        '*R.class', '*R$*.class', '*Manifest.class', '*BuildConfig.class']
    build_utils.MergeZips(
        options.test_jar_path, input_jars, dependency_class_filters)

  # Proguard runs on Release when enabled, or on Debug when explicitly
  # requested via --debug-build-proguard-enabled.
  if ((options.configuration_name == 'Release' and options.proguard_enabled) or
     (options.configuration_name == 'Debug' and
      options.debug_build_proguard_enabled)):
    DoProguard(options)
  else:
    # When proguard is skipped, still produce fresh (empty) outputs so
    # dependent build steps see updated timestamps.
    output_files = [
        options.obfuscated_jar_path,
        options.obfuscated_jar_path + '.info',
        options.obfuscated_jar_path + '.dump',
        options.obfuscated_jar_path + '.seeds',
        options.obfuscated_jar_path + '.usage',
        options.obfuscated_jar_path + '.mapping']
    for f in output_files:
      if os.path.exists(f):
        os.remove(f)
      build_utils.Touch(f)

  if options.stamp:
    build_utils.Touch(options.stamp)

if __name__ == '__main__':
  sys.exit(main(sys.argv[1:]))
diff --git a/build/android/gyp/apkbuilder.py b/build/android/gyp/apkbuilder.py
new file mode 100755
index 0000000..ef5f2cf
--- /dev/null
+++ b/build/android/gyp/apkbuilder.py
@@ -0,0 +1,306 @@
+#!/usr/bin/env python
+#
+# Copyright (c) 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Adds the code parts to a resource APK."""
+
+import argparse
+import itertools
+import os
+import shutil
+import sys
+import zipfile
+
+from util import build_utils
+
+
# Taken from aapt's Package.cpp: file types that are stored uncompressed in
# the APK (these media formats are typically already compressed).
_NO_COMPRESS_EXTENSIONS = ('.jpg', '.jpeg', '.png', '.gif', '.wav', '.mp2',
                           '.mp3', '.ogg', '.aac', '.mpg', '.mpeg', '.mid',
                           '.midi', '.smf', '.jet', '.rtttl', '.imy', '.xmf',
                           '.mp4', '.m4a', '.m4v', '.3gp', '.3gpp', '.3g2',
                           '.3gpp2', '.amr', '.awb', '.wma', '.wmv', '.webm')
+
+
def _ParseArgs(args):
  """Parses args; all GYP-list valued options are expanded to Python lists.

  Raises:
    Exception: when native libs/placeholders are given without the
    matching --android-abi / --secondary-android-abi flag.
  """
  parser = argparse.ArgumentParser()
  build_utils.AddDepfileOption(parser)
  parser.add_argument('--assets',
                      help='GYP-list of files to add as assets in the form '
                           '"srcPath:zipPath", where ":zipPath" is optional.',
                      default='[]')
  parser.add_argument('--write-asset-list',
                      action='store_true',
                      help='Whether to create an assets/assets_list file.')
  parser.add_argument('--uncompressed-assets',
                      help='Same as --assets, except disables compression.',
                      default='[]')
  parser.add_argument('--resource-apk',
                      help='An .ap_ file built using aapt',
                      required=True)
  parser.add_argument('--output-apk',
                      help='Path to the output file',
                      required=True)
  parser.add_argument('--dex-file',
                      help='Path to the classes.dex to use')
  parser.add_argument('--native-libs',
                      action='append',
                      help='GYP-list of native libraries to include. '
                           'Can be specified multiple times.',
                      default=[])
  parser.add_argument('--secondary-native-libs',
                      action='append',
                      help='GYP-list of native libraries for secondary '
                           'android-abi. Can be specified multiple times.',
                      default=[])
  parser.add_argument('--android-abi',
                      help='Android architecture to use for native libraries')
  parser.add_argument('--secondary-android-abi',
                      help='The secondary Android architecture to use for'
                           'secondary native libraries')
  parser.add_argument('--native-lib-placeholders',
                      help='GYP-list of native library placeholders to add.',
                      default='[]')
  parser.add_argument('--emma-device-jar',
                      help='Path to emma_device.jar to include.')
  parser.add_argument('--uncompress-shared-libraries',
                      action='store_true',
                      help='Uncompress shared libraries')
  options = parser.parse_args(args)
  options.assets = build_utils.ParseGypList(options.assets)
  options.uncompressed_assets = build_utils.ParseGypList(
      options.uncompressed_assets)
  options.native_lib_placeholders = build_utils.ParseGypList(
      options.native_lib_placeholders)
  # --native-libs may be repeated, and each occurrence is itself a GYP
  # list, so flatten them into one list.
  all_libs = []
  for gyp_list in options.native_libs:
    all_libs.extend(build_utils.ParseGypList(gyp_list))
  options.native_libs = all_libs
  secondary_libs = []
  for gyp_list in options.secondary_native_libs:
    secondary_libs.extend(build_utils.ParseGypList(gyp_list))
  options.secondary_native_libs = secondary_libs


  # An ABI is required whenever any native library (or placeholder) is given.
  if not options.android_abi and (options.native_libs or
                                  options.native_lib_placeholders):
    raise Exception('Must specify --android-abi with --native-libs')
  if not options.secondary_android_abi and options.secondary_native_libs:
    raise Exception('Must specify --secondary-android-abi with'
                    ' --secondary-native-libs')
  return options
+
+
+def _SplitAssetPath(path):
+  """Returns (src, dest) given an asset path in the form src[:dest]."""
+  path_parts = path.split(':')
+  src_path = path_parts[0]
+  if len(path_parts) > 1:
+    dest_path = path_parts[1]
+  else:
+    dest_path = os.path.basename(src_path)
+  return src_path, dest_path
+
+
+def _ExpandPaths(paths):
+  """Converts src:dst into tuples and enumerates files within directories.
+
+  Args:
+    paths: Paths in the form "src_path:dest_path"
+
+  Returns:
+    A list of (src_path, dest_path) tuples sorted by dest_path (for stable
+    ordering within output .apk).
+  """
+  ret = []
+  for path in paths:
+    src_path, dest_path = _SplitAssetPath(path)
+    if os.path.isdir(src_path):
+      for f in build_utils.FindInDirectory(src_path, '*'):
+        ret.append((f, os.path.join(dest_path, f[len(src_path) + 1:])))
+    else:
+      ret.append((src_path, dest_path))
+  ret.sort(key=lambda t:t[1])
+  return ret
+
+
def _AddAssets(apk, path_tuples, disable_compression=False):
  """Adds the given paths to the apk.

  Args:
    apk: ZipFile to write to.
    path_tuples: List of (src_path, dest_path) tuples to add.
    disable_compression: Whether to disable compression.
  """
  # Group all uncompressed assets together in the hope that it will increase
  # locality of mmap'ed files.
  for target_compress in (False, True):
    for src_path, dest_path in path_tuples:

      compress = not disable_compression and (
          os.path.splitext(src_path)[1] not in _NO_COMPRESS_EXTENSIONS)
      # Only emit entries whose compression matches the current pass.
      if target_compress == compress:
        apk_path = 'assets/' + dest_path
        try:
          apk.getinfo(apk_path)
          # Should never happen since write_build_config.py handles merging.
          raise Exception('Multiple targets specified the asset path: %s' %
                          apk_path)
        except KeyError:
          # KeyError from getinfo() means the path is not yet in the apk --
          # the normal case.
          build_utils.AddToZipHermetic(apk, apk_path, src_path=src_path,
                                       compress=compress)
+
+
+def _CreateAssetsList(path_tuples):
+  """Returns a newline-separated list of asset paths for the given paths."""
+  dests = sorted(t[1] for t in path_tuples)
+  return '\n'.join(dests) + '\n'
+
+
def _AddNativeLibraries(out_apk, native_libs, android_abi, uncompress):
  """Add native libraries to APK."""
  for lib_path in native_libs:
    lib_name = os.path.basename(lib_path)
    zip_path = 'lib/%s/%s' % (android_abi, lib_name)

    # None leaves the compression decision to AddToZipHermetic; False
    # forces the .so to be stored uncompressed.
    compress = None
    if uncompress and os.path.splitext(lib_name)[1] == '.so':
      compress = False

    build_utils.AddToZipHermetic(out_apk,
                                 zip_path,
                                 src_path=lib_path,
                                 compress=compress)
+
+
def main(args):
  """Assembles the output APK from the resource APK plus dex/native/assets."""
  args = build_utils.ExpandFileArgs(args)
  options = _ParseArgs(args)

  native_libs = sorted(options.native_libs)

  # input_paths/input_strings feed the staleness check at the bottom; the
  # APK is only rebuilt when one of them changes.
  input_paths = [options.resource_apk, __file__] + native_libs

  secondary_native_libs = []
  if options.secondary_native_libs:
    secondary_native_libs = sorted(options.secondary_native_libs)
    input_paths += secondary_native_libs

  if options.dex_file:
    input_paths.append(options.dex_file)

  if options.emma_device_jar:
    input_paths.append(options.emma_device_jar)

  input_strings = [options.android_abi,
                   options.native_lib_placeholders,
                   options.uncompress_shared_libraries]

  if options.secondary_android_abi:
    input_strings.append(options.secondary_android_abi)

  _assets = _ExpandPaths(options.assets)
  _uncompressed_assets = _ExpandPaths(options.uncompressed_assets)

  for src_path, dest_path in itertools.chain(_assets, _uncompressed_assets):
    input_paths.append(src_path)
    input_strings.append(dest_path)

  def on_stale_md5():
    # Build into a temp file first so a failure never leaves a half-written
    # output APK behind; the finally-block cleans up on error.
    tmp_apk = options.output_apk + '.tmp'
    try:
      # TODO(agrieve): It would be more efficient to combine this step
      # with finalize_apk(), which sometimes aligns and uncompresses the
      # native libraries.
      with zipfile.ZipFile(options.resource_apk) as resource_apk, \
           zipfile.ZipFile(tmp_apk, 'w', zipfile.ZIP_DEFLATED) as out_apk:
        def copy_resource(zipinfo):
          # Preserve each entry's stored/deflated status from the .ap_.
          compress = zipinfo.compress_type != zipfile.ZIP_STORED
          build_utils.AddToZipHermetic(out_apk, zipinfo.filename,
                                       data=resource_apk.read(zipinfo.filename),
                                       compress=compress)

        # Make assets come before resources in order to maintain the same file
        # ordering as GYP / aapt. http://crbug.com/561862
        resource_infos = resource_apk.infolist()

        # 1. AndroidManifest.xml
        assert resource_infos[0].filename == 'AndroidManifest.xml'
        copy_resource(resource_infos[0])

        # 2. Assets
        if options.write_asset_list:
          data = _CreateAssetsList(
              itertools.chain(_assets, _uncompressed_assets))
          build_utils.AddToZipHermetic(out_apk, 'assets/assets_list', data=data)

        _AddAssets(out_apk, _assets, disable_compression=False)
        _AddAssets(out_apk, _uncompressed_assets, disable_compression=True)

        # 3. Dex files
        if options.dex_file and options.dex_file.endswith('.zip'):
          with zipfile.ZipFile(options.dex_file, 'r') as dex_zip:
            for dex in (d for d in dex_zip.namelist() if d.endswith('.dex')):
              build_utils.AddToZipHermetic(out_apk, dex, data=dex_zip.read(dex))
        elif options.dex_file:
          build_utils.AddToZipHermetic(out_apk, 'classes.dex',
                                       src_path=options.dex_file)

        # 4. Native libraries.
        _AddNativeLibraries(out_apk,
                            native_libs,
                            options.android_abi,
                            options.uncompress_shared_libraries)

        if options.secondary_android_abi:
          _AddNativeLibraries(out_apk,
                              secondary_native_libs,
                              options.secondary_android_abi,
                              options.uncompress_shared_libraries)

        for name in sorted(options.native_lib_placeholders):
          # Empty libs files are ignored by md5check, but rezip requires them
          # to be empty in order to identify them as placeholders.
          apk_path = 'lib/%s/%s' % (options.android_abi, name)
          build_utils.AddToZipHermetic(out_apk, apk_path, data='')

        # 5. Resources
        for info in resource_infos[1:]:
          copy_resource(info)

        # 6. Java resources. Used only when coverage is enabled, so order
        # doesn't matter).
        if options.emma_device_jar:
          # Add EMMA Java resources to APK.
          with zipfile.ZipFile(options.emma_device_jar, 'r') as emma_device_jar:
            for apk_path in emma_device_jar.namelist():
              apk_path_lower = apk_path.lower()
              if apk_path_lower.startswith('meta-inf/'):
                continue

              if apk_path_lower.endswith('/'):
                continue

              if apk_path_lower.endswith('.class'):
                continue

              build_utils.AddToZipHermetic(out_apk, apk_path,
                                           data=emma_device_jar.read(apk_path))

      shutil.move(tmp_apk, options.output_apk)
    finally:
      if os.path.exists(tmp_apk):
        os.unlink(tmp_apk)

  build_utils.CallAndWriteDepfileIfStale(
      on_stale_md5,
      options,
      input_paths=input_paths,
      input_strings=input_strings,
      output_paths=[options.output_apk])


if __name__ == '__main__':
  main(sys.argv[1:])
diff --git a/build/android/gyp/configure_multidex.py b/build/android/gyp/configure_multidex.py
new file mode 100755
index 0000000..9f3b736
--- /dev/null
+++ b/build/android/gyp/configure_multidex.py
@@ -0,0 +1,87 @@
+#!/usr/bin/env python
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+
+import argparse
+import json
+import os
+import sys
+
+from util import build_utils
+
+
# Sibling script used below to run the preprocessor over the Java template.
_GCC_PREPROCESS_PATH = os.path.join(
    os.path.dirname(__file__), 'gcc_preprocess.py')
+
+
def ParseArgs():
  """Parses command line arguments (reads sys.argv)."""
  parser = argparse.ArgumentParser()
  parser.add_argument('--configuration-name', required=True,
                      help='The build CONFIGURATION_NAME.')
  parser.add_argument('--enable-multidex', action='store_true', default=False,
                      help='If passed, multidex may be enabled.')
  parser.add_argument('--enabled-configurations', default=[],
                      help='The configuration(s) for which multidex should be '
                           'enabled. If not specified and --enable-multidex is '
                           'passed, multidex will be enabled for all '
                           'configurations.')
  parser.add_argument('--multidex-configuration-path', required=True,
                      help='The path to which the multidex configuration JSON '
                           'should be saved.')
  parser.add_argument('--multidex-config-java-file', required=True)
  parser.add_argument('--multidex-config-java-stamp', required=True)
  parser.add_argument('--multidex-config-java-template', required=True)

  args = parser.parse_args()

  # The configuration whitelist arrives as a GYP-list string; expand it.
  if args.enabled_configurations:
    args.enabled_configurations = build_utils.ParseGypList(
        args.enabled_configurations)

  return args
+
+
+def _WriteConfigJson(multidex_enabled, multidex_configuration_path):
+  config = {
+    'enabled': multidex_enabled,
+  }
+
+  with open(multidex_configuration_path, 'w') as f:
+    f.write(json.dumps(config))
+
+
def _GenerateMultidexConfigJava(multidex_enabled, args):
  """Generates the MultiDex config Java file via gcc_preprocess.py."""
  cmd = [
      sys.executable, _GCC_PREPROCESS_PATH,
      '--include-path=',
      '--template', args.multidex_config_java_template,
      '--stamp', args.multidex_config_java_stamp,
      '--output', args.multidex_config_java_file,
  ]
  if multidex_enabled:
    # Presumably the template keys off this define; confirm in the template.
    cmd.extend(['--defines', 'ENABLE_MULTIDEX'])

  build_utils.CheckOutput(cmd)
+
+
def main():
  """Decides whether multidex is on, then writes the JSON + Java outputs."""
  args = ParseArgs()

  # Multidex is on only when requested and, if a configuration whitelist
  # was given, the current configuration is in it.
  enabled = args.enable_multidex and (
      not args.enabled_configurations or
      args.configuration_name in args.enabled_configurations)

  _WriteConfigJson(enabled, args.multidex_configuration_path)
  _GenerateMultidexConfigJava(enabled, args)

  return 0


if __name__ == '__main__':
  sys.exit(main())
+
diff --git a/build/android/gyp/copy_ex.py b/build/android/gyp/copy_ex.py
new file mode 100755
index 0000000..3d7434d
--- /dev/null
+++ b/build/android/gyp/copy_ex.py
@@ -0,0 +1,117 @@
+#!/usr/bin/env python
+#
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Copies files to a directory."""
+
+import itertools
+import optparse
+import os
+import shutil
+import sys
+
+from util import build_utils
+
+
+def _get_all_files(base):
+  """Returns a list of all the files in |base|. Each entry is relative to the
+  last path entry of |base|."""
+  result = []
+  # Strip the parent of |base| so entries keep the final component of |base|
+  # as their first path element.
+  dirname = os.path.dirname(base)
+  for root, _, files in os.walk(base):
+    # NOTE(review): root[len(dirname):] retains a leading path separator
+    # (e.g. '/b/f' for base 'a/b') -- confirm depfile consumers expect that.
+    result.extend([os.path.join(root[len(dirname):], f) for f in files])
+  return result
+
+def CopyFile(f, dest, deps):
+  """Copy file or directory and update deps.
+
+  Args:
+    f: Source file or directory path.
+    dest: Destination directory (directories are copied under their basename).
+    deps: List of dependency paths, appended to in place.
+  """
+  if os.path.isdir(f):
+    # copytree requires the target directory to not exist yet; callers use
+    # --clear to guarantee that.
+    shutil.copytree(f, os.path.join(dest, os.path.basename(f)))
+    deps.extend(_get_all_files(f))
+  else:
+    shutil.copy(f, dest)
+    deps.append(f)
+
+def DoCopy(options, deps):
+  """Copy files or directories given in options.files and update deps."""
+  # Each --files value is a gyp list; flatten them into one list of paths.
+  files = list(itertools.chain.from_iterable(build_utils.ParseGypList(f)
+                                             for f in options.files))
+
+  for f in files:
+    # Copying a directory without clearing the destination could leave files
+    # from a previous build alongside the new copy, so refuse to do it.
+    if os.path.isdir(f) and not options.clear:
+      print ('To avoid stale files you must use --clear when copying '
+             'directories')
+      sys.exit(-1)
+    CopyFile(f, options.dest, deps)
+
+def DoRenaming(options, deps):
+  """Copy and rename files given in options.renaming_sources and update deps."""
+  # Flatten the gyp-list arguments; sources and destinations are parallel
+  # lists and must therefore be the same length.
+  src_files = list(itertools.chain.from_iterable(
+                   build_utils.ParseGypList(f)
+                   for f in options.renaming_sources))
+
+  dest_files = list(itertools.chain.from_iterable(
+                    build_utils.ParseGypList(f)
+                    for f in options.renaming_destinations))
+
+  if (len(src_files) != len(dest_files)):
+    print('Renaming source and destination files not match.')
+    sys.exit(-1)
+
+  # itertools.izip: this script targets Python 2 (see the shebang).
+  for src, dest in itertools.izip(src_files, dest_files):
+    if os.path.isdir(src):
+      print ('renaming diretory is not supported.')
+      sys.exit(-1)
+    else:
+      CopyFile(src, os.path.join(options.dest, dest), deps)
+
+def main(args):
+  """Copies (and optionally renames) files into --dest, writing a depfile."""
+  args = build_utils.ExpandFileArgs(args)
+
+  parser = optparse.OptionParser()
+  build_utils.AddDepfileOption(parser)
+
+  parser.add_option('--dest', help='Directory to copy files to.')
+  parser.add_option('--files', action='append',
+                    help='List of files to copy.')
+  parser.add_option('--clear', action='store_true',
+                    help='If set, the destination directory will be deleted '
+                    'before copying files to it. This is highly recommended to '
+                    'ensure that no stale files are left in the directory.')
+  parser.add_option('--stamp', help='Path to touch on success.')
+  parser.add_option('--renaming-sources',
+                    action='append',
+                    help='List of files need to be renamed while being '
+                         'copied to dest directory')
+  parser.add_option('--renaming-destinations',
+                    action='append',
+                    help='List of destination file name without path, the '
+                         'number of elements must match rename-sources.')
+
+  options, _ = parser.parse_args(args)
+
+  if options.clear:
+    build_utils.DeleteDirectory(options.dest)
+    build_utils.MakeDirectory(options.dest)
+
+  # Collected input paths for the depfile; the Do* helpers append to it.
+  deps = []
+
+  if options.files:
+    DoCopy(options, deps)
+
+  if options.renaming_sources:
+    DoRenaming(options, deps)
+
+  if options.depfile:
+    build_utils.WriteDepfile(
+        options.depfile,
+        deps + build_utils.GetPythonDependencies())
+
+  if options.stamp:
+    build_utils.Touch(options.stamp)
+
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv[1:]))
+
diff --git a/build/android/gyp/create_device_library_links.py b/build/android/gyp/create_device_library_links.py
new file mode 100755
index 0000000..c7f59dd
--- /dev/null
+++ b/build/android/gyp/create_device_library_links.py
@@ -0,0 +1,121 @@
+#!/usr/bin/env python
+#
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Creates symlinks to native libraries for an APK.
+
+The native libraries should have previously been pushed to the device (in
+options.target_dir). This script then creates links in an apk's lib/ folder to
+those native libraries.
+"""
+
+import optparse
+import os
+import sys
+
+from util import build_device
+from util import build_utils
+
+BUILD_ANDROID_DIR = os.path.abspath(
+    os.path.join(os.path.dirname(__file__), '..'))
+sys.path.append(BUILD_ANDROID_DIR)
+
+import devil_chromium
+from devil.android import apk_helper
+from pylib import constants
+
+def RunShellCommand(device, cmd):
+  """Runs |cmd| on |device|, raising if it produces any output.
+
+  The commands issued by this script (rm/ln/mkdir) are expected to be silent
+  on success, so any output is treated as a failure.
+  """
+  output = device.RunShellCommand(cmd, check_return=True)
+
+  if output:
+    raise Exception(
+        'Unexpected output running command: ' + cmd + '\n' +
+        '\n'.join(output))
+
+
+def CreateSymlinkScript(options):
+  """Writes a host-side shell script that recreates the library symlinks.
+
+  The script references $APK_LIBRARIES_DIR and $STRIPPED_LIBRARIES_DIR, which
+  are set by TriggerSymlinkScript when the script is sourced on the device.
+  """
+  libraries = build_utils.ParseGypList(options.libraries)
+
+  # Remove any stale link first (ignoring errors), then create the new one.
+  link_cmd = (
+      'rm $APK_LIBRARIES_DIR/%(lib_basename)s > /dev/null 2>&1 \n'
+      'ln -s $STRIPPED_LIBRARIES_DIR/%(lib_basename)s '
+        '$APK_LIBRARIES_DIR/%(lib_basename)s \n'
+      )
+
+  script = '#!/bin/sh \n'
+
+  for lib in libraries:
+    script += link_cmd % { 'lib_basename': lib }
+
+  with open(options.script_host_path, 'w') as scriptfile:
+    scriptfile.write(script)
+
+
+def TriggerSymlinkScript(options):
+  """Pushes the symlink script to the device and sources it there.
+
+  No-op if no build device is available.
+  """
+  device = build_device.GetBuildDeviceFromPath(
+      options.build_device_configuration)
+  if not device:
+    return
+
+  apk_package = apk_helper.GetPackageName(options.apk)
+  apk_libraries_dir = '/data/data/%s/lib' % apk_package
+
+  device_dir = os.path.dirname(options.script_device_path)
+  mkdir_cmd = ('if [ ! -e %(dir)s ]; then mkdir -p %(dir)s; fi ' %
+      { 'dir': device_dir })
+  RunShellCommand(device, mkdir_cmd)
+  device.PushChangedFiles([(os.path.abspath(options.script_host_path),
+                            options.script_device_path)])
+
+  # Source ('.') the script with the directory variables it expects
+  # (see CreateSymlinkScript) defined in the same shell.
+  trigger_cmd = (
+      'APK_LIBRARIES_DIR=%(apk_libraries_dir)s; '
+      'STRIPPED_LIBRARIES_DIR=%(target_dir)s; '
+      '. %(script_device_path)s'
+      ) % {
+          'apk_libraries_dir': apk_libraries_dir,
+          'target_dir': options.target_dir,
+          'script_device_path': options.script_device_path
+          }
+  RunShellCommand(device, trigger_cmd)
+
+
+def main(args):
+  """Creates and runs a device script symlinking native libraries for an APK."""
+  args = build_utils.ExpandFileArgs(args)
+  parser = optparse.OptionParser()
+  parser.add_option('--apk', help='Path to the apk.')
+  parser.add_option('--script-host-path',
+      help='Path on the host for the symlink script.')
+  parser.add_option('--script-device-path',
+      help='Path on the device to push the created symlink script.')
+  parser.add_option('--libraries',
+      help='List of native libraries.')
+  parser.add_option('--target-dir',
+      help='Device directory that contains the target libraries for symlinks.')
+  parser.add_option('--stamp', help='Path to touch on success.')
+  parser.add_option('--build-device-configuration',
+      help='Path to build device configuration.')
+  parser.add_option('--configuration-name',
+      help='The build CONFIGURATION_NAME')
+  parser.add_option('--output-directory',
+      help='The output directory')
+  options, _ = parser.parse_args(args)
+
+  required_options = ['apk', 'libraries', 'script_host_path',
+      'script_device_path', 'target_dir', 'configuration_name']
+  build_utils.CheckOptions(options, parser, required=required_options)
+  constants.SetBuildType(options.configuration_name)
+
+  # NOTE(review): --output-directory is not in required_options, yet
+  # os.path.abspath(None) would raise here -- confirm callers always pass it.
+  devil_chromium.Initialize(
+      output_directory=os.path.abspath(options.output_directory))
+
+  CreateSymlinkScript(options)
+  TriggerSymlinkScript(options)
+
+  if options.stamp:
+    build_utils.Touch(options.stamp)
+
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv[1:]))
diff --git a/build/android/gyp/create_dist_jar.py b/build/android/gyp/create_dist_jar.py
new file mode 100755
index 0000000..0d31c5d
--- /dev/null
+++ b/build/android/gyp/create_dist_jar.py
@@ -0,0 +1,36 @@
+#!/usr/bin/env python
+#
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Merges a list of jars into a single jar."""
+
+import optparse
+import sys
+
+from util import build_utils
+
+def main(args):
+  """Merges the jars listed in --inputs into the single --output jar."""
+  args = build_utils.ExpandFileArgs(args)
+  parser = optparse.OptionParser()
+  build_utils.AddDepfileOption(parser)
+  parser.add_option('--output', help='Path to output jar.')
+  parser.add_option('--inputs', action='append', help='List of jar inputs.')
+  options, _ = parser.parse_args(args)
+  build_utils.CheckOptions(options, parser, ['output', 'inputs'])
+
+  # Each --inputs value is itself a gyp list; flatten into one jar list.
+  input_jars = []
+  for inputs_arg in options.inputs:
+    input_jars.extend(build_utils.ParseGypList(inputs_arg))
+
+  build_utils.MergeZips(options.output, input_jars)
+
+  if options.depfile:
+    build_utils.WriteDepfile(
+        options.depfile,
+        input_jars + build_utils.GetPythonDependencies())
+
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv[1:]))
diff --git a/build/android/gyp/create_java_binary_script.py b/build/android/gyp/create_java_binary_script.py
new file mode 100755
index 0000000..2b6553d
--- /dev/null
+++ b/build/android/gyp/create_java_binary_script.py
@@ -0,0 +1,102 @@
+#!/usr/bin/env python
+#
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Creates a simple script to run a java "binary".
+
+This creates a script that sets up the java command line for running a java
+jar. This includes correctly setting the classpath and the main class.
+"""
+
+import optparse
+import os
+import sys
+
+from util import build_utils
+
+# The java command must be executed in the current directory because there may
+# be user-supplied paths in the args. The script receives the classpath relative
+# to the directory that the script is written in and then, when run, must
+# recalculate the paths relative to the current directory.
+script_template = """\
+#!/usr/bin/env python
+#
+# This file was generated by build/android/gyp/create_java_binary_script.py
+
+import os
+import sys
+
+self_dir = os.path.dirname(__file__)
+classpath = [{classpath}]
+bootclasspath = [{bootclasspath}]
+extra_program_args = {extra_program_args}
+if os.getcwd() != self_dir:
+  offset = os.path.relpath(self_dir, os.getcwd())
+  classpath = [os.path.join(offset, p) for p in classpath]
+  bootclasspath = [os.path.join(offset, p) for p in bootclasspath]
+java_cmd = ["java"]
+{noverify_flag}
+if bootclasspath:
+    java_cmd.append("-Xbootclasspath/p:" + ":".join(bootclasspath))
+java_cmd.extend(
+    ["-classpath", ":".join(classpath), "-enableassertions", \"{main_class}\"])
+java_cmd.extend(extra_program_args)
+java_cmd.extend(sys.argv[1:])
+os.execvp("java", java_cmd)
+"""
+
+def main(argv):
+  """Writes an executable wrapper script that runs the given jar with java."""
+  argv = build_utils.ExpandFileArgs(argv)
+  parser = optparse.OptionParser()
+  build_utils.AddDepfileOption(parser)
+  parser.add_option('--output', help='Output path for executable script.')
+  parser.add_option('--jar-path', help='Path to the main jar.')
+  parser.add_option('--main-class',
+      help='Name of the java class with the "main" entry point.')
+  parser.add_option('--classpath', action='append', default=[],
+      help='Classpath for running the jar.')
+  parser.add_option('--bootclasspath', action='append', default=[],
+      help='zip/jar files to add to bootclasspath for java cmd.')
+  parser.add_option('--noverify', action='store_true',
+      help='JVM flag: noverify.')
+
+  # Unrecognized args become extra arguments baked into the generated script.
+  options, extra_program_args = parser.parse_args(argv)
+
+  if (options.noverify):
+    noverify_flag = 'java_cmd.append("-noverify")'
+  else:
+    noverify_flag = ''
+
+  classpath = [options.jar_path]
+  for cp_arg in options.classpath:
+    classpath += build_utils.ParseGypList(cp_arg)
+
+  bootclasspath = []
+  for bootcp_arg in options.bootclasspath:
+    bootclasspath += build_utils.ParseGypList(bootcp_arg)
+
+  # Store paths relative to the script's directory; the script re-resolves
+  # them against its own location at run time (see script_template).
+  run_dir = os.path.dirname(options.output)
+  bootclasspath = [os.path.relpath(p, run_dir) for p in bootclasspath]
+  classpath = [os.path.relpath(p, run_dir) for p in classpath]
+
+  with open(options.output, 'w') as script:
+    script.write(script_template.format(
+      classpath=('"%s"' % '", "'.join(classpath)),
+      bootclasspath=('"%s"' % '", "'.join(bootclasspath)
+                     if bootclasspath else ''),
+      main_class=options.main_class,
+      extra_program_args=repr(extra_program_args),
+      noverify_flag=noverify_flag))
+
+  # rwxr-x--- (Python 2 octal literal; this script targets Python 2).
+  os.chmod(options.output, 0750)
+
+  if options.depfile:
+    build_utils.WriteDepfile(
+        options.depfile,
+        build_utils.GetPythonDependencies())
+
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv[1:]))
diff --git a/build/android/gyp/create_placeholder_files.py b/build/android/gyp/create_placeholder_files.py
new file mode 100755
index 0000000..103e1df
--- /dev/null
+++ b/build/android/gyp/create_placeholder_files.py
@@ -0,0 +1,35 @@
+#!/usr/bin/env python
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Create placeholder files.
+"""
+
+import optparse
+import os
+import sys
+
+from util import build_utils
+
+def main():
+  """Touches an empty placeholder file in --dest-lib-dir for each name given
+  as a positional argument."""
+  parser = optparse.OptionParser()
+  parser.add_option(
+      '--dest-lib-dir',
+      help='Destination directory to have placeholder files.')
+  parser.add_option(
+      '--stamp',
+      help='Path to touch on success')
+
+  options, args = parser.parse_args()
+
+  for name in args:
+    target_path = os.path.join(options.dest_lib_dir, name)
+    build_utils.Touch(target_path)
+
+  if options.stamp:
+    build_utils.Touch(options.stamp)
+
+if __name__ == '__main__':
+  sys.exit(main())
+
diff --git a/build/android/gyp/create_standalone_apk.py b/build/android/gyp/create_standalone_apk.py
new file mode 100755
index 0000000..c560599
--- /dev/null
+++ b/build/android/gyp/create_standalone_apk.py
@@ -0,0 +1,60 @@
+#!/usr/bin/env python
+#
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Combines stripped libraries and incomplete APK into single standalone APK.
+
+"""
+
+import optparse
+import os
+import shutil
+import sys
+import tempfile
+
+from util import build_utils
+from util import md5_check
+
+def CreateStandaloneApk(options):
+  """Zips the stripped libraries' lib/ tree into the APK, if inputs changed.
+
+  Work is skipped entirely (via md5_check) when neither the input APK nor the
+  libraries directory changed since the last run.
+  """
+  def DoZip():
+    # Work on a temp copy so the input APK is never modified, then publish
+    # the result to the output path.
+    with tempfile.NamedTemporaryFile(suffix='.zip') as intermediate_file:
+      intermediate_path = intermediate_file.name
+      shutil.copy(options.input_apk_path, intermediate_path)
+      apk_path_abs = os.path.abspath(intermediate_path)
+      # Run zip from the libraries dir so entries are archived as 'lib/...'.
+      build_utils.CheckOutput(
+          ['zip', '-r', '-1', apk_path_abs, 'lib'],
+          cwd=options.libraries_top_dir)
+      shutil.copy(intermediate_path, options.output_apk_path)
+
+  input_paths = [options.input_apk_path, options.libraries_top_dir]
+  record_path = '%s.standalone.stamp' % options.input_apk_path
+  md5_check.CallAndRecordIfStale(
+      DoZip,
+      record_path=record_path,
+      input_paths=input_paths)
+
+
+def main():
+  """Builds a standalone APK by zipping native libraries into an incomplete
+  APK."""
+  parser = optparse.OptionParser()
+  parser.add_option('--libraries-top-dir',
+      help='Top directory that contains libraries '
+      '(i.e. library paths are like '
+      'libraries_top_dir/lib/android_app_abi/foo.so).')
+  parser.add_option('--input-apk-path', help='Path to incomplete APK.')
+  parser.add_option('--output-apk-path', help='Path for standalone APK.')
+  parser.add_option('--stamp', help='Path to touch on success.')
+  options, _ = parser.parse_args()
+
+  required_options = ['libraries_top_dir', 'input_apk_path', 'output_apk_path']
+  build_utils.CheckOptions(options, parser, required=required_options)
+
+  CreateStandaloneApk(options)
+
+  if options.stamp:
+    build_utils.Touch(options.stamp)
+
+
+if __name__ == '__main__':
+  sys.exit(main())
diff --git a/build/android/gyp/create_test_runner_script.py b/build/android/gyp/create_test_runner_script.py
new file mode 100755
index 0000000..be15dfd
--- /dev/null
+++ b/build/android/gyp/create_test_runner_script.py
@@ -0,0 +1,131 @@
+#!/usr/bin/env python
+#
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Creates a script to run an android test using build/android/test_runner.py.
+"""
+
+import argparse
+import os
+import sys
+
+from util import build_utils
+
+SCRIPT_TEMPLATE = """\
+#!/usr/bin/env python
+#
+# This file was generated by build/android/gyp/create_test_runner_script.py
+
+import os
+import subprocess
+import sys
+
+def main():
+  script_directory = os.path.dirname(__file__)
+
+  def ResolvePath(path):
+    \"\"\"Returns an absolute filepath given a path relative to this script.
+    \"\"\"
+    return os.path.abspath(os.path.join(script_directory, path))
+
+  test_runner_path = ResolvePath('{test_runner_path}')
+  test_runner_args = {test_runner_args}
+  test_runner_path_args = {test_runner_path_args}
+  for arg, path in test_runner_path_args:
+    test_runner_args.extend([arg, ResolvePath(path)])
+
+  test_runner_cmd = [test_runner_path] + test_runner_args + sys.argv[1:]
+  return subprocess.call(test_runner_cmd)
+
+if __name__ == '__main__':
+  sys.exit(main())
+"""
+
+def main(args):
+  """Writes an executable wrapper script that invokes test_runner.py.
+
+  All path-valued flags are rewritten relative to the output script's
+  directory, so the generated script works regardless of the cwd it is run
+  from (it re-resolves them against its own location; see SCRIPT_TEMPLATE).
+  """
+  parser = argparse.ArgumentParser()
+  parser.add_argument('--script-output-path',
+                      help='Output path for executable script.')
+  parser.add_argument('--depfile',
+                      help='Path to the depfile. This must be specified as '
+                           "the action's first output.")
+  parser.add_argument('--test-runner-path',
+                      help='Path to test_runner.py (optional).')
+  # We need to intercept any test runner path arguments and make all
+  # of the paths relative to the output script directory.
+  group = parser.add_argument_group('Test runner path arguments.')
+  group.add_argument('--additional-apk', action='append',
+                     dest='additional_apks', default=[])
+  group.add_argument('--additional-apk-list')
+  group.add_argument('--apk-under-test')
+  group.add_argument('--apk-under-test-incremental-install-script')
+  group.add_argument('--executable-dist-dir')
+  group.add_argument('--isolate-file-path')
+  group.add_argument('--output-directory')
+  group.add_argument('--test-apk')
+  group.add_argument('--test-apk-incremental-install-script')
+  group.add_argument('--coverage-dir')
+  # Unknown args are passed through to the generated script verbatim.
+  args, test_runner_args = parser.parse_known_args(
+      build_utils.ExpandFileArgs(args))
+
+  def RelativizePathToScript(path):
+    """Returns the path relative to the output script directory."""
+    return os.path.relpath(path, os.path.dirname(args.script_output_path))
+
+  test_runner_path = args.test_runner_path or os.path.join(
+      os.path.dirname(__file__), os.path.pardir, 'test_runner.py')
+  test_runner_path = RelativizePathToScript(test_runner_path)
+
+  # (flag, relative-path) pairs; the generated script re-absolutizes them.
+  test_runner_path_args = []
+  if args.additional_apk_list:
+    args.additional_apks.extend(
+        build_utils.ParseGypList(args.additional_apk_list))
+  if args.additional_apks:
+    test_runner_path_args.extend(
+        ('--additional-apk', RelativizePathToScript(a))
+        for a in args.additional_apks)
+  if args.apk_under_test:
+    test_runner_path_args.append(
+        ('--apk-under-test', RelativizePathToScript(args.apk_under_test)))
+  if args.apk_under_test_incremental_install_script:
+    test_runner_path_args.append(
+        ('--apk-under-test-incremental-install-script',
+         RelativizePathToScript(
+             args.apk_under_test_incremental_install_script)))
+  if args.executable_dist_dir:
+    test_runner_path_args.append(
+        ('--executable-dist-dir',
+         RelativizePathToScript(args.executable_dist_dir)))
+  if args.isolate_file_path:
+    test_runner_path_args.append(
+        ('--isolate-file-path', RelativizePathToScript(args.isolate_file_path)))
+  if args.output_directory:
+    test_runner_path_args.append(
+        ('--output-directory', RelativizePathToScript(args.output_directory)))
+  if args.test_apk:
+    test_runner_path_args.append(
+        ('--test-apk', RelativizePathToScript(args.test_apk)))
+  if args.test_apk_incremental_install_script:
+    test_runner_path_args.append(
+        ('--test-apk-incremental-install-script',
+         RelativizePathToScript(args.test_apk_incremental_install_script)))
+  if args.coverage_dir:
+    test_runner_path_args.append(
+        ('--coverage-dir', RelativizePathToScript(args.coverage_dir)))
+
+  with open(args.script_output_path, 'w') as script:
+    script.write(SCRIPT_TEMPLATE.format(
+        test_runner_path=str(test_runner_path),
+        test_runner_args=str(test_runner_args),
+        test_runner_path_args=str(test_runner_path_args)))
+
+  # rwxr-x--- (Python 2 octal literal; this script targets Python 2).
+  os.chmod(args.script_output_path, 0750)
+
+  if args.depfile:
+    build_utils.WriteDepfile(
+        args.depfile,
+        build_utils.GetPythonDependencies())
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv[1:]))
diff --git a/build/android/gyp/create_tool_wrapper.py b/build/android/gyp/create_tool_wrapper.py
new file mode 100755
index 0000000..4433004
--- /dev/null
+++ b/build/android/gyp/create_tool_wrapper.py
@@ -0,0 +1,46 @@
+#!/usr/bin/env python
+#
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Creates a simple wrapper script that passes the correct --output-directory.
+"""
+
+import argparse
+import os
+
+_TEMPLATE = """\
+#!/usr/bin/env python
+#
+# This file was generated by //build/android/gyp/create_tool_script.py
+
+import os
+import sys
+
+cmd = '{cmd}'
+args = [os.path.basename(cmd), '{flag_name}={output_directory}'] + sys.argv[1:]
+os.execv(cmd, args)
+"""
+
+def main():
+  """Writes an executable wrapper that runs --target with the given
+  --output-directory flag baked in (see _TEMPLATE)."""
+  parser = argparse.ArgumentParser()
+  parser.add_argument('--output', help='Output path for executable script.')
+  parser.add_argument('--target', help='Path to script being wrapped.')
+  parser.add_argument('--output-directory', help='Value for --output-directory')
+  parser.add_argument('--flag-name',
+                      help='Flag name to use instead of --output-directory',
+                      default='--output-directory')
+  args = parser.parse_args()
+
+  # Absolute paths so the wrapper works from any cwd.
+  with open(args.output, 'w') as script:
+    script.write(_TEMPLATE.format(
+        cmd=os.path.abspath(args.target),
+        flag_name=args.flag_name,
+        output_directory=os.path.abspath(args.output_directory)))
+
+  # rwxr-x--- (Python 2 octal literal; this script targets Python 2).
+  os.chmod(args.output, 0750)
+
+
+if __name__ == '__main__':
+  main()
diff --git a/build/android/gyp/dex.py b/build/android/gyp/dex.py
new file mode 100755
index 0000000..9400ff2
--- /dev/null
+++ b/build/android/gyp/dex.py
@@ -0,0 +1,204 @@
+#!/usr/bin/env python
+#
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import json
+import logging
+import optparse
+import os
+import sys
+import tempfile
+import zipfile
+
+from util import build_utils
+
+
+def _RemoveUnwantedFilesFromZip(dex_path):
+  """Rewrites the zip at |dex_path| keeping only its .dex entries."""
+  # NOTE(review): neither iz nor oz is close()d explicitly; this relies on
+  # CPython refcounting to flush the output zip's central directory at
+  # function exit -- confirm, or consider 'with' blocks.
+  iz = zipfile.ZipFile(dex_path, 'r')
+  tmp_dex_path = '%s.tmp.zip' % dex_path
+  oz = zipfile.ZipFile(tmp_dex_path, 'w', zipfile.ZIP_DEFLATED)
+  for i in iz.namelist():
+    if i.endswith('.dex'):
+      oz.writestr(i, iz.read(i))
+  # Replace the original zip with the filtered copy.
+  os.remove(dex_path)
+  os.rename(tmp_dex_path, dex_path)
+
+
+def _ParseArgs(args):
+  """Parses command-line args for dex.py.
+
+  Returns:
+    (options, paths): parsed options plus the positional input paths.
+  """
+  args = build_utils.ExpandFileArgs(args)
+
+  parser = optparse.OptionParser()
+  build_utils.AddDepfileOption(parser)
+
+  parser.add_option('--android-sdk-tools',
+                    help='Android sdk build tools directory.')
+  parser.add_option('--output-directory',
+                    default=os.getcwd(),
+                    help='Path to the output build directory.')
+  parser.add_option('--dex-path', help='Dex output path.')
+  parser.add_option('--configuration-name',
+                    help='The build CONFIGURATION_NAME.')
+  parser.add_option('--proguard-enabled',
+                    help='"true" if proguard is enabled.')
+  parser.add_option('--debug-build-proguard-enabled',
+                    help='"true" if proguard is enabled for debug build.')
+  parser.add_option('--proguard-enabled-input-path',
+                    help=('Path to dex in Release mode when proguard '
+                          'is enabled.'))
+  parser.add_option('--no-locals', default='0',
+                    help='Exclude locals list from the dex file.')
+  parser.add_option('--incremental',
+                    action='store_true',
+                    help='Enable incremental builds when possible.')
+  parser.add_option('--inputs', help='A list of additional input paths.')
+  parser.add_option('--excluded-paths',
+                    help='A list of paths to exclude from the dex file.')
+  parser.add_option('--main-dex-list-path',
+                    help='A file containing a list of the classes to '
+                         'include in the main dex.')
+  parser.add_option('--multidex-configuration-path',
+                    help='A JSON file containing multidex build configuration.')
+  parser.add_option('--multi-dex', default=False, action='store_true',
+                    help='Generate multiple dex files.')
+
+  options, paths = parser.parse_args(args)
+
+  required_options = ('android_sdk_tools',)
+  build_utils.CheckOptions(options, parser, required=required_options)
+
+  # The JSON config (written by the multidex config step) overrides the
+  # --multi-dex flag when present.
+  if options.multidex_configuration_path:
+    with open(options.multidex_configuration_path) as multidex_config_file:
+      multidex_config = json.loads(multidex_config_file.read())
+    options.multi_dex = multidex_config.get('enabled', False)
+
+  # Multidex requires a main-dex list; warn and fall back rather than fail.
+  if options.multi_dex and not options.main_dex_list_path:
+    logging.warning('multidex cannot be enabled without --main-dex-list-path')
+    options.multi_dex = False
+  elif options.main_dex_list_path and not options.multi_dex:
+    logging.warning('--main-dex-list-path is unused if multidex is not enabled')
+
+  if options.inputs:
+    options.inputs = build_utils.ParseGypList(options.inputs)
+  if options.excluded_paths:
+    options.excluded_paths = build_utils.ParseGypList(options.excluded_paths)
+
+  return options, paths
+
+
+def _AllSubpathsAreClassFiles(paths, changes):
+  """Returns True if every changed entry within |paths| is a .class file."""
+  for path in paths:
+    if any(not p.endswith('.class') for p in changes.IterChangedSubpaths(path)):
+      return False
+  return True
+
+
+def _DexWasEmpty(paths, changes):
+  """Returns True if the previous build saw no .class files in |paths|.
+
+  Checked against the old metadata: an incremental merge into a dex that was
+  built from no classes would not be equivalent to a full rebuild.
+  """
+  for path in paths:
+    if any(p.endswith('.class')
+           for p in changes.old_metadata.IterSubpaths(path)):
+      return False
+  return True
+
+
+def _RunDx(changes, options, dex_cmd, paths):
+  """Runs dx over |paths|, using --incremental when it is safe to do so."""
+  with build_utils.TempDir() as classes_temp_dir:
+    # --multi-dex is incompatible with --incremental.
+    if options.multi_dex:
+      dex_cmd.append('--main-dex-list=%s' % options.main_dex_list_path)
+    else:
+      # Use --incremental when .class files are added or modified (never when
+      # removed).
+      # --incremental tells dx to merge all newly dex'ed .class files with
+      # those that already exist in the output dex file (existing classes are
+      # replaced).
+      if options.incremental and changes.AddedOrModifiedOnly():
+        changed_inputs = set(changes.IterChangedPaths())
+        changed_paths = [p for p in paths if p in changed_inputs]
+        if not changed_paths:
+          return
+        # When merging in other dex files, there's no easy way to know if
+        # classes were removed from them.
+        if (_AllSubpathsAreClassFiles(changed_paths, changes)
+            and not _DexWasEmpty(changed_paths, changes)):
+          dex_cmd.append('--incremental')
+          # Extract only the changed .class files and dex just those.
+          for path in changed_paths:
+            changed_subpaths = set(changes.IterChangedSubpaths(path))
+            # Not a fundamental restriction, but it's the case right now and it
+            # simplifies the logic to assume so.
+            assert changed_subpaths, 'All inputs should be zip files.'
+            build_utils.ExtractAll(path, path=classes_temp_dir,
+                                   predicate=lambda p: p in changed_subpaths)
+          paths = [classes_temp_dir]
+
+    dex_cmd += paths
+    build_utils.CheckOutput(dex_cmd, print_stderr=False)
+
+  # dx leaves non-.dex entries (e.g. META-INF) in zip outputs; drop them.
+  if options.dex_path.endswith('.zip'):
+    _RemoveUnwantedFilesFromZip(options.dex_path)
+
+
+def _OnStaleMd5(changes, options, dex_cmd, paths):
+  """Rebuild callback: runs dx and records the inputs used (as paths relative
+  to the output directory) in a .inputs file next to the dex."""
+  _RunDx(changes, options, dex_cmd, paths)
+  build_utils.WriteJson(
+      [os.path.relpath(p, options.output_directory) for p in paths],
+      options.dex_path + '.inputs')
+
+
+def main(args):
+  """Dexes the given inputs, skipping the work when nothing changed."""
+  options, paths = _ParseArgs(args)
+  # When proguard ran for this configuration, its output replaces the
+  # positional inputs entirely.
+  if ((options.proguard_enabled == 'true'
+          and options.configuration_name == 'Release')
+      or (options.debug_build_proguard_enabled == 'true'
+          and options.configuration_name == 'Debug')):
+    paths = [options.proguard_enabled_input_path]
+
+  if options.inputs:
+    paths += options.inputs
+
+  if options.excluded_paths:
+    # Excluded paths are relative to the output directory.
+    exclude_paths = options.excluded_paths
+    paths = [p for p in paths if not
+             os.path.relpath(p, options.output_directory) in exclude_paths]
+
+  input_paths = list(paths)
+
+  dx_binary = os.path.join(options.android_sdk_tools, 'dx')
+  # See http://crbug.com/272064 for context on --force-jumbo.
+  # See https://github.com/android/platform_dalvik/commit/dd140a22d for
+  # --num-threads.
+  dex_cmd = [dx_binary, '--num-threads=8', '--dex', '--force-jumbo',
+             '--output', options.dex_path]
+  if options.no_locals != '0':
+    dex_cmd.append('--no-locals')
+
+  if options.multi_dex:
+    input_paths.append(options.main_dex_list_path)
+    dex_cmd += [
+      '--multi-dex',
+      '--minimal-main-dex',
+    ]
+
+  output_paths = [
+    options.dex_path,
+    options.dex_path + '.inputs',
+  ]
+
+  # An escape hatch to be able to check if incremental dexing is causing
+  # problems.
+  force = int(os.environ.get('DISABLE_INCREMENTAL_DX', 0))
+
+  # Only re-run dx (via _OnStaleMd5) when inputs, outputs, or the command
+  # line itself changed since the last build.
+  build_utils.CallAndWriteDepfileIfStale(
+      lambda changes: _OnStaleMd5(changes, options, dex_cmd, paths),
+      options,
+      input_paths=input_paths,
+      input_strings=dex_cmd,
+      output_paths=output_paths,
+      force=force,
+      pass_changes=True)
+
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv[1:]))
diff --git a/build/android/gyp/emma_instr.py b/build/android/gyp/emma_instr.py
new file mode 100755
index 0000000..9ba6776
--- /dev/null
+++ b/build/android/gyp/emma_instr.py
@@ -0,0 +1,230 @@
+#!/usr/bin/env python
+#
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Instruments classes and jar files.
+
+This script corresponds to the 'emma_instr' action in the java build process.
+Depending on whether emma_instrument is set, the 'emma_instr' action will either
+call the instrument command or the copy command.
+
+Possible commands are:
+- instrument_jar: Accepts a jar and instruments it using emma.jar.
+- copy: Called when EMMA coverage is not enabled. This allows us to make
+      this a required step without necessarily instrumenting on every build.
+      Also removes any stale coverage files.
+"""
+
+import collections
+import json
+import os
+import shutil
+import sys
+import tempfile
+
+sys.path.append(os.path.join(os.path.dirname(__file__), os.pardir))
+from pylib.utils import command_option_parser
+
+from util import build_utils
+
+
def _AddCommonOptions(option_parser):
  """Adds the options shared by the 'copy' and 'instrument_jar' commands."""
  build_utils.AddDepfileOption(option_parser)
  common_flags = [
      ('--input-path',
       'Path to input file(s). Either the classes directory, or the path to '
       'a jar.'),
      ('--output-path',
       'Path to output final file(s) to. Either the final classes directory, '
       'or the directory in which to place the instrumented/copied jar.'),
      ('--stamp', 'Path to touch when done.'),
      ('--coverage-file', 'File to create with coverage metadata.'),
      ('--sources-list-file', 'File to create with the list of sources.'),
  ]
  for flag, help_text in common_flags:
    option_parser.add_option(flag, help=help_text)
+
+
def _AddInstrumentOptions(option_parser):
  """Adds options related to instrumentation to |option_parser|."""
  _AddCommonOptions(option_parser)
  option_parser.add_option(
      '--source-dirs',
      help='Space separated list of source directories. source-files should '
           'not be specified if source-dirs is specified')
  option_parser.add_option(
      '--source-files',
      help='Space separated list of source files. source-dirs should not be '
           'specified if source-files is specified')
  option_parser.add_option('--src-root', help='Root of the src repository.')
  option_parser.add_option('--emma-jar', help='Path to emma.jar.')
  option_parser.add_option(
      '--filter-string', default='',
      help=('Filter string consisting of a list of inclusion/exclusion '
            'patterns separated with whitespace and/or comma.'))
+
+
def _RunCopyCommand(_command, options, _, option_parser):
  """Copies the jar from input to output locations.

  Also removes any old coverage/sources file.

  Args:
    _command: String indicating the command that was received to trigger
        this function (unused).
    options: optparse options dictionary.
    _: List of extra args from optparse (unused).
    option_parser: optparse.OptionParser object.

  Returns:
    An exit code.
  """
  required = (options.input_path, options.output_path,
              options.coverage_file, options.sources_list_file)
  if not all(required):
    option_parser.error('All arguments are required.')

  # Remove stale coverage artifacts left behind by an instrumented build.
  for stale_path in (options.coverage_file, options.sources_list_file):
    if os.path.exists(stale_path):
      os.remove(stale_path)

  shutil.copy(options.input_path, options.output_path)

  if options.stamp:
    build_utils.Touch(options.stamp)

  if options.depfile:
    build_utils.WriteDepfile(options.depfile,
                             build_utils.GetPythonDependencies())
+
+
def _GetSourceDirsFromSourceFiles(source_files_string):
  """Returns the unique directories containing the given source files.

  Args:
    source_files_string: String generated from GN or GYP containing the list
      of source files.

  Returns:
    List of source directories (order unspecified, duplicates removed).
  """
  unique_dirs = set()
  for source_file in build_utils.ParseGypList(source_files_string):
    unique_dirs.add(os.path.dirname(source_file))
  return list(unique_dirs)
+
+
+def _CreateSourcesListFile(source_dirs, sources_list_file, src_root):
+  """Adds all normalized source directories to |sources_list_file|.
+
+  Args:
+    source_dirs: List of source directories.
+    sources_list_file: File into which to write the JSON list of sources.
+    src_root: Root which sources added to the file should be relative to.
+
+  Returns:
+    An exit code.
+  """
+  src_root = os.path.abspath(src_root)
+  relative_sources = []
+  for s in source_dirs:
+    abs_source = os.path.abspath(s)
+    if abs_source[:len(src_root)] != src_root:
+      print ('Error: found source directory not under repository root: %s %s'
+             % (abs_source, src_root))
+      return 1
+    rel_source = os.path.relpath(abs_source, src_root)
+
+    relative_sources.append(rel_source)
+
+  with open(sources_list_file, 'w') as f:
+    json.dump(relative_sources, f)
+
+
def _RunInstrumentCommand(_command, options, _, option_parser):
  """Instruments jar files using EMMA.

  Args:
    _command: String indicating the command that was received to trigger
        this function (unused).
    options: optparse options dictionary.
    _: List of extra args from optparse (unused).
    option_parser: optparse.OptionParser object.

  Returns:
    An exit code.
  """
  if not (options.input_path and options.output_path and
          options.coverage_file and options.sources_list_file and
          (options.source_files or options.source_dirs) and
          options.src_root and options.emma_jar):
    option_parser.error('All arguments are required.')

  if os.path.exists(options.coverage_file):
    os.remove(options.coverage_file)
  temp_dir = tempfile.mkdtemp()
  try:
    cmd = ['java', '-cp', options.emma_jar,
           'emma', 'instr',
           '-ip', options.input_path,
           '-ix', options.filter_string,
           '-d', temp_dir,
           '-out', options.coverage_file,
           '-m', 'fullcopy']
    build_utils.CheckOutput(cmd)

    # File is not generated when filter_string doesn't match any files.
    if not os.path.exists(options.coverage_file):
      build_utils.Touch(options.coverage_file)

    temp_jar_dir = os.path.join(temp_dir, 'lib')
    jars = os.listdir(temp_jar_dir)
    if len(jars) != 1:
      print('Error: multiple output files in: %s' % (temp_jar_dir))
      return 1

    # Delete output_path first to avoid modifying input_path in the case where
    # input_path is a hardlink to output_path. http://crbug.com/571642
    if os.path.exists(options.output_path):
      os.unlink(options.output_path)
    shutil.move(os.path.join(temp_jar_dir, jars[0]), options.output_path)
  finally:
    shutil.rmtree(temp_dir)

  if options.source_dirs:
    source_dirs = build_utils.ParseGypList(options.source_dirs)
  else:
    source_dirs = _GetSourceDirsFromSourceFiles(options.source_files)
  # BUG FIX: _CreateSourcesListFile returns an exit code; the original
  # silently discarded it, masking failures.
  ret = _CreateSourcesListFile(source_dirs, options.sources_list_file,
                               options.src_root)
  if ret:
    return ret

  if options.stamp:
    build_utils.Touch(options.stamp)

  if options.depfile:
    build_utils.WriteDepfile(options.depfile,
                             build_utils.GetPythonDependencies())

  return 0
+
+
# Maps each sub-command name to a pair of callbacks: one that registers the
# command's options and one that executes it. Consumed by
# command_option_parser in main() below.
CommandFunctionTuple = collections.namedtuple(
    'CommandFunctionTuple', ['add_options_func', 'run_command_func'])
VALID_COMMANDS = {
    # 'copy' runs when EMMA coverage is disabled: copies the jar unchanged.
    'copy': CommandFunctionTuple(_AddCommonOptions,
                                 _RunCopyCommand),
    # 'instrument_jar' instruments the input jar via emma.jar.
    'instrument_jar': CommandFunctionTuple(_AddInstrumentOptions,
                                           _RunInstrumentCommand),
}
+
+
def main():
  """Dispatches to the 'copy' or 'instrument_jar' sub-command."""
  parser = command_option_parser.CommandOptionParser(
      commands_dict=VALID_COMMANDS)
  command_option_parser.ParseAndExecute(parser)


if __name__ == '__main__':
  sys.exit(main())
diff --git a/build/android/gyp/finalize_apk.py b/build/android/gyp/finalize_apk.py
new file mode 100755
index 0000000..d71cb8f
--- /dev/null
+++ b/build/android/gyp/finalize_apk.py
@@ -0,0 +1,161 @@
+#!/usr/bin/env python
+#
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Signs and zipaligns APK.
+
+"""
+
+import optparse
+import shutil
+import sys
+import tempfile
+
+from util import build_utils
+
def RenameInflateAndAddPageAlignment(
    rezip_apk_jar_path, in_zip_file, out_zip_file):
  """Runs RezipApk in 'renamealign' mode on |in_zip_file| -> |out_zip_file|."""
  build_utils.CheckOutput([
      'java',
      '-classpath', rezip_apk_jar_path,
      'RezipApk',
      'renamealign',
      in_zip_file,
      out_zip_file,
  ])
+
+
def ReorderAndAlignApk(rezip_apk_jar_path, in_zip_file, out_zip_file):
  """Runs RezipApk in 'reorder' mode on |in_zip_file| -> |out_zip_file|."""
  build_utils.CheckOutput([
      'java',
      '-classpath', rezip_apk_jar_path,
      'RezipApk',
      'reorder',
      in_zip_file,
      out_zip_file,
  ])
+
+
def JarSigner(key_path, key_name, key_passwd, unsigned_path, signed_path):
  """Copies |unsigned_path| to |signed_path| and signs it with jarsigner."""
  shutil.copy(unsigned_path, signed_path)
  build_utils.CheckOutput([
      'jarsigner',
      '-sigalg', 'MD5withRSA',
      '-digestalg', 'SHA1',
      '-keystore', key_path,
      '-storepass', key_passwd,
      signed_path,
      key_name,
  ])
+
+
def AlignApk(zipalign_path, package_align, unaligned_path, final_path):
  """Zipaligns |unaligned_path| into |final_path|.

  When |package_align| is true, passes -p so shared libraries are also
  page aligned.
  """
  cmd = [zipalign_path, '-f']
  if package_align:
    cmd.append('-p')
  cmd += [
      '4',  # 4 bytes
      unaligned_path,
      final_path,
  ]
  build_utils.CheckOutput(cmd)
+
+
def main(args):
  """Parses arguments and finalizes the APK when inputs are stale."""
  args = build_utils.ExpandFileArgs(args)

  parser = optparse.OptionParser()
  build_utils.AddDepfileOption(parser)

  parser.add_option('--rezip-apk-jar-path',
                    help='Path to the RezipApk jar file.')
  parser.add_option('--zipalign-path', help='Path to the zipalign tool.')
  parser.add_option('--page-align-shared-libraries',
                    action='store_true',
                    help='Page align shared libraries.')
  parser.add_option('--unsigned-apk-path', help='Path to input unsigned APK.')
  parser.add_option('--final-apk-path',
      help='Path to output signed and aligned APK.')
  parser.add_option('--key-path', help='Path to keystore for signing.')
  parser.add_option('--key-passwd', help='Keystore password')
  parser.add_option('--key-name', help='Keystore name')
  parser.add_option('--stamp', help='Path to touch on success.')
  parser.add_option('--load-library-from-zip', type='int',
      help='If non-zero, build the APK such that the library can be loaded ' +
           'directly from the zip file using the crazy linker. The library ' +
           'will be renamed, uncompressed and page aligned.')

  # BUG FIX: pass the expanded |args| explicitly. parse_args() with no
  # argument reads sys.argv[1:], silently discarding ExpandFileArgs' output.
  options, _ = parser.parse_args(args)

  input_paths = [
    options.unsigned_apk_path,
    options.key_path,
  ]

  if options.load_library_from_zip:
    input_paths.append(options.rezip_apk_jar_path)

  input_strings = [
    options.load_library_from_zip,
    options.key_name,
    options.key_passwd,
    options.page_align_shared_libraries,
  ]

  build_utils.CallAndWriteDepfileIfStale(
      lambda: FinalizeApk(options),
      options,
      record_path=options.unsigned_apk_path + '.finalize.md5.stamp',
      input_paths=input_paths,
      input_strings=input_strings,
      output_paths=[options.final_apk_path])
+
+
def FinalizeApk(options):
  """Signs and zipaligns options.unsigned_apk_path into options.final_apk_path.

  When options.load_library_from_zip is non-zero, the native library inside
  the APK is renamed, stored uncompressed and page aligned (via RezipApk)
  before signing, then restored to its canonical position afterwards.
  """
  # NOTE(review): the temp files are rewritten by name while still open;
  # this relies on POSIX file semantics — presumably never run on Windows.
  with tempfile.NamedTemporaryFile() as signed_apk_path_tmp, \
      tempfile.NamedTemporaryFile() as apk_to_sign_tmp:

    if options.load_library_from_zip:
      # We alter the name of the library so that the Android Package Manager
      # does not extract it into a separate file. This must be done before
      # signing, as the filename is part of the signed manifest. At the same
      # time we uncompress the library, which is necessary so that it can be
      # loaded directly from the APK.
      # Move the library to a page boundary by adding a page alignment file.
      apk_to_sign = apk_to_sign_tmp.name
      RenameInflateAndAddPageAlignment(
          options.rezip_apk_jar_path, options.unsigned_apk_path, apk_to_sign)
    else:
      apk_to_sign = options.unsigned_apk_path

    # Sign (a copy of) the chosen input in place.
    signed_apk_path = signed_apk_path_tmp.name
    JarSigner(options.key_path, options.key_name, options.key_passwd,
              apk_to_sign, signed_apk_path)

    if options.load_library_from_zip:
      # Reorder the contents of the APK. This re-establishes the canonical
      # order which means the library will be back at its page aligned location.
      # This step also aligns uncompressed items to 4 bytes.
      ReorderAndAlignApk(
          options.rezip_apk_jar_path, signed_apk_path, options.final_apk_path)
    else:
      # Align uncompressed items to 4 bytes
      AlignApk(options.zipalign_path,
               options.page_align_shared_libraries,
               signed_apk_path,
               options.final_apk_path)


if __name__ == '__main__':
  sys.exit(main(sys.argv[1:]))
diff --git a/build/android/gyp/finalize_splits.py b/build/android/gyp/finalize_splits.py
new file mode 100755
index 0000000..a6796bb
--- /dev/null
+++ b/build/android/gyp/finalize_splits.py
@@ -0,0 +1,52 @@
+#!/usr/bin/env python
+#
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Signs and zipaligns split APKs.
+
+This script is required only by GYP (not GN).
+"""
+
+import optparse
+import sys
+
+import finalize_apk
+from util import build_utils
+
def main():
  """Signs and zipaligns each density and language split APK.

  For every density in --densities and every language in --languages, derives
  the input (.ap_ suffix) and output (.apk) paths from the base paths and
  runs finalize_apk.FinalizeApk on them.
  """
  parser = optparse.OptionParser()
  parser.add_option('--zipalign-path', help='Path to the zipalign tool.')
  parser.add_option('--resource-packaged-apk-path',
      help='Base path to input .ap_s.')
  parser.add_option('--base-output-path',
      help='Path to output .apk, minus extension.')
  parser.add_option('--key-path', help='Path to keystore for signing.')
  parser.add_option('--key-passwd', help='Keystore password')
  parser.add_option('--key-name', help='Keystore name')
  parser.add_option('--densities',
      # Typo fix: was 'densities finalize.'
      help='Comma separated list of densities to finalize.')
  parser.add_option('--languages',
      help='GYP list of language splits to finalize.')

  options, _ = parser.parse_args()
  # Split APKs never load the library directly from the zip.
  options.load_library_from_zip = 0

  def _Finalize(kind, suffix):
    # |options| doubles as the argument bag FinalizeApk expects; fill in the
    # per-split input/output paths before each call.
    options.unsigned_apk_path = '%s_%s' % (
        options.resource_packaged_apk_path, suffix)
    options.final_apk_path = '%s-%s-%s.apk' % (
        options.base_output_path, kind, suffix)
    finalize_apk.FinalizeApk(options)

  if options.densities:
    for density in options.densities.split(','):
      _Finalize('density', density)

  if options.languages:
    for lang in build_utils.ParseGypList(options.languages):
      _Finalize('lang', lang)


if __name__ == '__main__':
  sys.exit(main())
diff --git a/build/android/gyp/find.py b/build/android/gyp/find.py
new file mode 100755
index 0000000..a9f1d49
--- /dev/null
+++ b/build/android/gyp/find.py
@@ -0,0 +1,30 @@
+#!/usr/bin/env python
+#
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Finds files in directories.
+"""
+
+import fnmatch
+import optparse
+import os
+import sys
+
+
def main(argv):
  # Walks each directory given on the command line and prints every contained
  # file whose basename matches --pattern, one path per line.
  # (Python 2: uses print statements.)
  parser = optparse.OptionParser()
  parser.add_option('--pattern', default='*', help='File pattern to match.')
  options, directories = parser.parse_args(argv)

  for d in directories:
    if not os.path.exists(d):
      # Missing directory: report on stderr and exit non-zero immediately.
      print >> sys.stderr, '%s does not exist' % d
      return 1
    for root, _, filenames in os.walk(d):
      for f in fnmatch.filter(filenames, options.pattern):
        print os.path.join(root, f)

if __name__ == '__main__':
  sys.exit(main(sys.argv[1:]))
diff --git a/build/android/gyp/find_sun_tools_jar.py b/build/android/gyp/find_sun_tools_jar.py
new file mode 100755
index 0000000..2f15a15
--- /dev/null
+++ b/build/android/gyp/find_sun_tools_jar.py
@@ -0,0 +1,56 @@
+#!/usr/bin/env python
+#
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""This finds the java distribution's tools.jar and copies it somewhere.
+"""
+
+import argparse
+import os
+import re
+import shutil
+import sys
+
+from util import build_utils
+
+RT_JAR_FINDER = re.compile(r'\[Opened (.*)/jre/lib/rt.jar\]')
+
def main():
  """Locates the JDK's tools.jar and copies it to --output."""
  parser = argparse.ArgumentParser(description='Find Sun Tools Jar')
  parser.add_argument('--depfile',
                      help='Path to depfile. This must be specified as the '
                           'action\'s first output.')
  parser.add_argument('--output', required=True)
  options = parser.parse_args()

  sun_tools_jar_path = FindSunToolsJarPath()
  if sun_tools_jar_path is None:
    raise Exception("Couldn\'t find tools.jar")

  # Using copyfile instead of copy() because copy() calls copymode();
  # we don't want the locked mode because we may copy over this file again.
  shutil.copyfile(sun_tools_jar_path, options.output)

  if options.depfile:
    build_utils.WriteDepfile(
        options.depfile,
        [sun_tools_jar_path] + build_utils.GetPythonDependencies())
+
+
def FindSunToolsJarPath():
  """Returns the path to the JDK's tools.jar, or None if it cannot be found.

  This works with at least openjdk 1.6, 1.7 and sun java 1.6, 1.7 by parsing
  the rt.jar location out of `java -verbose -version` output.
  """
  stdout = build_utils.CheckOutput(
      ["java", "-verbose", "-version"], print_stderr=False)
  for line in stdout.splitlines():
    found = RT_JAR_FINDER.match(line)
    if found:
      return os.path.join(found.group(1), 'lib', 'tools.jar')
  return None


if __name__ == '__main__':
  sys.exit(main())
diff --git a/build/android/gyp/gcc_preprocess.py b/build/android/gyp/gcc_preprocess.py
new file mode 100755
index 0000000..03becf9
--- /dev/null
+++ b/build/android/gyp/gcc_preprocess.py
@@ -0,0 +1,58 @@
+#!/usr/bin/env python
+#
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import optparse
+import os
+import sys
+
+from util import build_utils
+
def DoGcc(options):
  """Preprocesses options.template with the host gcc into options.output."""
  build_utils.MakeDirectory(os.path.dirname(options.output))

  cmd = ['gcc']  # invoke host gcc.
  for macro in options.defines or []:
    cmd += ['-D', macro]
  cmd += [
      '-E',                  # stop after preprocessing.
      '-D', 'ANDROID',       # Specify ANDROID define for pre-processor.
      '-x', 'c-header',      # treat sources as C header files
      '-P',                  # disable line markers, i.e. '#line 309'
      '-I', options.include_path,
      '-o', options.output,
      options.template,
  ]
  build_utils.CheckOutput(cmd)
+
+
def main(args):
  """Parses arguments, runs the preprocessor, writes depfile and stamp."""
  args = build_utils.ExpandFileArgs(args)

  parser = optparse.OptionParser()
  build_utils.AddDepfileOption(parser)
  parser.add_option('--include-path', help='Include path for gcc.')
  parser.add_option('--template', help='Path to template.')
  parser.add_option('--output', help='Path for generated file.')
  parser.add_option('--stamp', help='Path to touch on success.')
  parser.add_option('--defines', help='Pre-defines macros', action='append')
  options, _ = parser.parse_args(args)

  DoGcc(options)

  if options.depfile:
    build_utils.WriteDepfile(options.depfile,
                             build_utils.GetPythonDependencies())

  if options.stamp:
    build_utils.Touch(options.stamp)


if __name__ == '__main__':
  sys.exit(main(sys.argv[1:]))
diff --git a/build/android/gyp/generate_copy_ex_outputs.py b/build/android/gyp/generate_copy_ex_outputs.py
new file mode 100755
index 0000000..e425b4a
--- /dev/null
+++ b/build/android/gyp/generate_copy_ex_outputs.py
@@ -0,0 +1,33 @@
+#!/usr/bin/env python
+#
+# Copyright (c) 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+#
+# Generate outputs according source files and destination path for
+# copy_ex.gypi
+
+import argparse
+import os
+import sys
+
def DoMain(argv):
  """Returns the space-separated, quoted destination paths for copy_ex.gypi.

  Each --src-files entry is mapped to its basename joined under --dest-path.
  """
  parser = argparse.ArgumentParser(prog='generate_copy_ex_outputs')
  parser.add_argument('--src-files',
                      nargs = '+',
                      help = 'a list of files to copy')
  parser.add_argument('--dest-path',
                      required = True,
                      help = 'the directory to copy file to')
  options = parser.parse_args(argv)
  # Quote each element so filename spaces don't mess up gyp's attempt to
  # parse it into a list.
  quoted = ['"%s"' % os.path.join(options.dest_path, os.path.basename(src))
            for src in options.src_files]
  return ' '.join(quoted)
+
if __name__ == '__main__':
  # Print the result for gyp to capture; nothing is printed when the result
  # is empty. (Python 2 print statement.)
  results = DoMain(sys.argv[1:])
  if results:
    print results
diff --git a/build/android/gyp/generate_resource_rewriter.py b/build/android/gyp/generate_resource_rewriter.py
new file mode 100755
index 0000000..b6202ed
--- /dev/null
+++ b/build/android/gyp/generate_resource_rewriter.py
@@ -0,0 +1,110 @@
+#!/usr/bin/env python
+#
+# Copyright (c) 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Generate ResourceRewriter.java which overwrites the given package's
+   resource id.
+"""
+
+import argparse
+import os
+import sys
+import zipfile
+
+from util import build_utils
+
+# Import jinja2 from third_party/jinja2
+sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__),
+                                             '..',
+                                             '..',
+                                             '..',
+                                             'third_party')))
+import jinja2
+
+
# File name of the generated Java source.
RESOURCE_REWRITER_JAVA="ResourceRewriter.java"

# jinja2 template for ResourceRewriter.java. Rendered with |package| (the
# Java package of the generated class) and |res_packages| (the packages whose
# R.onResourcesLoaded() is called).
RESOURCE_REWRITER="""/* AUTO-GENERATED FILE.  DO NOT MODIFY. */

package {{ package }};
/**
 * Helper class used to fix up resource ids.
 */
class ResourceRewriter {
    /**
     * Rewrite the R 'constants' for the WebView.
     */
    public static void rewriteRValues(final int packageId) {
        {% for res_package in res_packages %}
        {{ res_package }}.R.onResourcesLoaded(packageId);
        {% endfor %}
    }
}
"""
+
def ParseArgs(args):
  """Parses command line options.

  Args:
    args: Argument list, typically sys.argv[1:] after ExpandFileArgs.

  Returns:
    An argparse.Namespace from argparse.parse_args().
  """
  parser = argparse.ArgumentParser(prog='generate_resource_rewriter')

  parser.add_argument('--package-name',
                      required=True,
                      help='The package name of ResourceRewriter.')
  parser.add_argument('--dep-packages',
                      required=True,
                      # Typo fix: the original concatenation rendered
                      # "will beoverwritten" (missing space).
                      help='A list of packages whose resource id will be '
                           'overwritten in ResourceRewriter.')
  parser.add_argument('--output-dir',
                      help='A output directory of generated'
                           ' ResourceRewriter.java')
  parser.add_argument('--srcjar',
                      help='The path of generated srcjar which has'
                           ' ResourceRewriter.java')

  return parser.parse_args(args)
+
+
def CreateResourceRewriter(package, res_packages, output_dir):
  """Renders ResourceRewriter.java for |package| into |output_dir|."""
  build_utils.MakeDirectory(output_dir)
  rendered = jinja2.Template(RESOURCE_REWRITER,
                             trim_blocks=True,
                             lstrip_blocks=True).render(
                                 package=package, res_packages=res_packages)
  java_path = os.path.join(output_dir, RESOURCE_REWRITER_JAVA)
  with open(java_path, 'w') as out_file:
    out_file.write(rendered)
+
def CreateResourceRewriterSrcjar(package, res_packages, srcjar_path):
  """Generates ResourceRewriter.java and packs it into |srcjar_path|."""
  with build_utils.TempDir() as temp_dir:
    # Place the file under the package's directory hierarchy inside the jar.
    package_dir = os.path.join(temp_dir, *package.split('.'))
    CreateResourceRewriter(package, res_packages, package_dir)
    java_file = os.path.join(package_dir, RESOURCE_REWRITER_JAVA)
    build_utils.DoZip([java_file], srcjar_path, temp_dir)
+
+
def main():
  """Generates ResourceRewriter.java into --output-dir or into --srcjar."""
  options = ParseArgs(build_utils.ExpandFileArgs(sys.argv[1:]))
  package = options.package_name
  dep_packages = build_utils.ParseGypList(options.dep_packages)

  if options.output_dir:
    CreateResourceRewriter(
        package,
        dep_packages,
        os.path.join(options.output_dir, *package.split('.')))
  else:
    CreateResourceRewriterSrcjar(package, dep_packages, options.srcjar)

  return 0

if __name__ == '__main__':
  sys.exit(main())
+
diff --git a/build/android/gyp/generate_split_manifest.py b/build/android/gyp/generate_split_manifest.py
new file mode 100755
index 0000000..9cb3bca
--- /dev/null
+++ b/build/android/gyp/generate_split_manifest.py
@@ -0,0 +1,97 @@
+#!/usr/bin/env python
+#
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Creates an AndroidManifest.xml for an APK split.
+
+Given the manifest file for the main APK, generates an AndroidManifest.xml with
+the value required for a Split APK (package, versionCode, etc).
+"""
+
+import optparse
+import xml.etree.ElementTree
+
+from util import build_utils
+
# Skeleton manifest emitted for each split APK; %(package)s, %(split)s and
# %(has_code)s are substituted by Build() below.
MANIFEST_TEMPLATE = """<?xml version="1.0" encoding="utf-8"?>
<manifest
    xmlns:android="http://schemas.android.com/apk/res/android"
    package="%(package)s"
    split="%(split)s">
  <uses-sdk android:minSdkVersion="21" />
  <application android:hasCode="%(has_code)s">
  </application>
</manifest>
"""
+
def ParseArgs():
  """Parses command line options.

  Returns:
    An options object as from optparse.OptionsParser.parse_args()
  """
  parser = optparse.OptionParser()
  build_utils.AddDepfileOption(parser)
  parser.add_option('--main-manifest', help='The main manifest of the app')
  parser.add_option('--out-manifest', help='The output manifest')
  parser.add_option('--split', help='The name of the split')
  parser.add_option(
      '--has-code',
      action='store_true',
      default=False,
      help='Whether the split will contain a .dex file')

  options, args = parser.parse_args()

  if args:
    parser.error('No positional arguments should be given.')

  # Check that required options have been provided.
  build_utils.CheckOptions(options, parser,
                           required=('main_manifest', 'out_manifest', 'split'))

  return options
+
+
def Build(main_manifest, split, has_code):
  """Builds a split manifest based on the manifest of the main APK.

  Args:
    main_manifest: the XML manifest of the main APK as a string
    split: the name of the split as a string
    has_code: whether this split APK will contain .dex files

  Returns:
    The XML split manifest as a string
  """
  root = xml.etree.ElementTree.fromstring(main_manifest)
  # '-' is mapped to '_' in the split attribute value.
  values = {
      'package': root.get('package'),
      'split': split.replace('-', '_'),
      'has_code': str(has_code).lower(),
  }
  return MANIFEST_TEMPLATE % values
+
+
def main():
  """Reads the main manifest and writes the generated split manifest."""
  options = ParseArgs()
  # Portability fix: use open() instead of the Python-2-only file() builtin,
  # and close the input handle deterministically.
  with open(options.main_manifest) as f:
    main_manifest = f.read()
  split_manifest = Build(
      main_manifest,
      options.split,
      options.has_code)

  with open(options.out_manifest, 'w') as f:
    f.write(split_manifest)

  if options.depfile:
    build_utils.WriteDepfile(
        options.depfile,
        [options.main_manifest] + build_utils.GetPythonDependencies())


if __name__ == '__main__':
  main()
diff --git a/build/android/gyp/generate_v14_compatible_resources.py b/build/android/gyp/generate_v14_compatible_resources.py
new file mode 100755
index 0000000..fc7abba
--- /dev/null
+++ b/build/android/gyp/generate_v14_compatible_resources.py
@@ -0,0 +1,324 @@
+#!/usr/bin/env python
+#
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Convert Android xml resources to API 14 compatible.
+
+There are two reasons that we cannot just use API 17 attributes,
+so we are generating another set of resources by this script.
+
+1. paddingStart attribute can cause a crash on Galaxy Tab 2.
+2. There is a bug that paddingStart does not override paddingLeft on
+   JB-MR1. This is fixed on JB-MR2. b/8654490
+
+Therefore, this resource generation script can be removed when
+we drop the support for JB-MR1.
+
+Please refer to http://crbug.com/235118 for the details.
+"""
+
+import codecs
+import optparse
+import os
+import re
+import shutil
+import sys
+import xml.dom.minidom as minidom
+
+from util import build_utils
+
+# Note that we are assuming 'android:' is an alias of
+# the namespace 'http://schemas.android.com/apk/res/android'.
+
+GRAVITY_ATTRIBUTES = ('android:gravity', 'android:layout_gravity')
+
+# Almost all the attributes that have "Start" or "End" in
+# their names should be mapped.
+ATTRIBUTES_TO_MAP = {'paddingStart' : 'paddingLeft',
+                     'drawableStart' : 'drawableLeft',
+                     'layout_alignStart' : 'layout_alignLeft',
+                     'layout_marginStart' : 'layout_marginLeft',
+                     'layout_alignParentStart' : 'layout_alignParentLeft',
+                     'layout_toStartOf' : 'layout_toLeftOf',
+                     'paddingEnd' : 'paddingRight',
+                     'drawableEnd' : 'drawableRight',
+                     'layout_alignEnd' : 'layout_alignRight',
+                     'layout_marginEnd' : 'layout_marginRight',
+                     'layout_alignParentEnd' : 'layout_alignParentRight',
+                     'layout_toEndOf' : 'layout_toRightOf'}
+
+# Prefix every key and value with the 'android:' namespace alias.
+ATTRIBUTES_TO_MAP = dict(['android:' + k, 'android:' + v] for k, v
+                         in ATTRIBUTES_TO_MAP.iteritems())
+
+# Reverse mapping, used to detect use of the deprecated Left/Right forms.
+ATTRIBUTES_TO_MAP_REVERSED = dict([v, k] for k, v
+                                  in ATTRIBUTES_TO_MAP.iteritems())
+
+
+def IterateXmlElements(node):
+  """minidom helper function that iterates all the element nodes.
+  Iteration order is pre-order depth-first."""
+  # Yield the node itself first (pre-order), then recurse into children.
+  if node.nodeType == node.ELEMENT_NODE:
+    yield node
+  for child_node in node.childNodes:
+    for child_node_element in IterateXmlElements(child_node):
+      yield child_node_element
+
+
+def ParseAndReportErrors(filename):
+  """Parses filename with minidom; on failure prints a traceback plus the
+  offending filename to stderr and exits with status 1 instead of raising."""
+  try:
+    return minidom.parse(filename)
+  except Exception: # pylint: disable=broad-except
+    # Imported lazily: only needed on the error path.
+    import traceback
+    traceback.print_exc()
+    sys.stderr.write('Failed to parse XML file: %s\n' % filename)
+    sys.exit(1)
+
+
+def AssertNotDeprecatedAttribute(name, value, filename):
+  """Raises an exception if the given attribute is deprecated."""
+  msg = None
+  # Left/Right attributes are deprecated in favor of their Start/End forms.
+  if name in ATTRIBUTES_TO_MAP_REVERSED:
+    msg = '{0} should use {1} instead of {2}'.format(filename,
+        ATTRIBUTES_TO_MAP_REVERSED[name], name)
+  # Gravity values must likewise use start/end rather than left/right.
+  elif name in GRAVITY_ATTRIBUTES and ('left' in value or 'right' in value):
+    msg = '{0} should use start/end instead of left/right for {1}'.format(
+        filename, name)
+
+  if msg:
+    msg += ('\nFor background, see: http://android-developers.blogspot.com/'
+            '2013/03/native-rtl-support-in-android-42.html\n'
+            'If you have a legitimate need for this attribute, discuss with '
+            'kkimlabs@chromium.org or newt@chromium.org')
+    raise Exception(msg)
+
+
+def WriteDomToFile(dom, filename):
+  """Write the given dom to filename."""
+  # Ensure the destination directory exists before writing.
+  build_utils.MakeDirectory(os.path.dirname(filename))
+  with codecs.open(filename, 'w', 'utf-8') as f:
+    dom.writexml(f, '', '  ', '\n', encoding='utf-8')
+
+
+def HasStyleResource(dom):
+  """Return True if the dom is a style resource, False otherwise."""
+  root_node = IterateXmlElements(dom).next()
+  return bool(root_node.nodeName == 'resources' and
+              list(root_node.getElementsByTagName('style')))
+
+
+def ErrorIfStyleResourceExistsInDir(input_dir):
+  """If a style resource is in input_dir, raises an exception."""
+  for input_filename in build_utils.FindInDirectory(input_dir, '*.xml'):
+    dom = ParseAndReportErrors(input_filename)
+    if HasStyleResource(dom):
+      # Allow style file in third_party to exist in non-v17 directories so long
+      # as they do not contain deprecated attributes.
+      if not 'third_party' in input_dir or (
+          GenerateV14StyleResourceDom(dom, input_filename)):
+        raise Exception('error: style file ' + input_filename +
+                        ' should be under ' + input_dir +
+                        '-v17 directory. Please refer to '
+                        'http://crbug.com/243952 for the details.')
+
+
+def GenerateV14LayoutResourceDom(dom, filename, assert_not_deprecated=True):
+  """Convert layout resource to API 14 compatible layout resource.
+
+  Args:
+    dom: Parsed minidom object to be modified.
+    filename: Filename that the DOM was parsed from.
+    assert_not_deprecated: Whether deprecated attributes (e.g. paddingLeft) will
+                           cause an exception to be thrown.
+
+  Returns:
+    True if dom is modified, False otherwise.
+  """
+  is_modified = False
+
+  # Iterate all the elements' attributes to find attributes to convert.
+  for element in IterateXmlElements(dom):
+    # Take a list() copy: the attribute map is mutated inside the loop.
+    for name, value in list(element.attributes.items()):
+      # Convert any API 17 Start/End attributes to Left/Right attributes.
+      # For example, from paddingStart="10dp" to paddingLeft="10dp"
+      # Note: gravity attributes are not necessary to convert because
+      # start/end values are backward-compatible. Explained at
+      # https://plus.sandbox.google.com/+RomanNurik/posts/huuJd8iVVXY?e=Showroom
+      if name in ATTRIBUTES_TO_MAP:
+        element.setAttribute(ATTRIBUTES_TO_MAP[name], value)
+        del element.attributes[name]
+        is_modified = True
+      elif assert_not_deprecated:
+        AssertNotDeprecatedAttribute(name, value, filename)
+
+  return is_modified
+
+
+def GenerateV14StyleResourceDom(dom, filename, assert_not_deprecated=True):
+  """Convert style resource to API 14 compatible style resource.
+
+  Args:
+    dom: Parsed minidom object to be modified.
+    filename: Filename that the DOM was parsed from.
+    assert_not_deprecated: Whether deprecated attributes (e.g. paddingLeft) will
+                           cause an exception to be thrown.
+
+  Returns:
+    True if dom is modified, False otherwise.
+  """
+  is_modified = False
+
+  for style_element in dom.getElementsByTagName('style'):
+    for item_element in style_element.getElementsByTagName('item'):
+      name = item_element.attributes['name'].value
+      # NOTE(review): assumes every <item> has a 'name' attribute and a text
+      # child; an empty <item/> would raise here — confirm inputs guarantee
+      # this.
+      value = item_element.childNodes[0].nodeValue
+      if name in ATTRIBUTES_TO_MAP:
+        item_element.attributes['name'].value = ATTRIBUTES_TO_MAP[name]
+        is_modified = True
+      elif assert_not_deprecated:
+        AssertNotDeprecatedAttribute(name, value, filename)
+
+  return is_modified
+
+
+def GenerateV14LayoutResource(input_filename, output_v14_filename,
+                              output_v17_filename):
+  """Convert API 17 layout resource to API 14 compatible layout resource.
+
+  It's mostly a simple replacement, s/Start/Left s/End/Right,
+  on the attribute names.
+  If the generated resource is identical to the original resource,
+  don't do anything. If not, write the generated resource to
+  output_v14_filename, and copy the original resource to output_v17_filename.
+  """
+  dom = ParseAndReportErrors(input_filename)
+  is_modified = GenerateV14LayoutResourceDom(dom, input_filename)
+
+  if is_modified:
+    # Write the generated resource.
+    WriteDomToFile(dom, output_v14_filename)
+
+    # Copy the original resource. copy2 also preserves file metadata
+    # (e.g. timestamps), unlike shutil.copy.
+    build_utils.MakeDirectory(os.path.dirname(output_v17_filename))
+    shutil.copy2(input_filename, output_v17_filename)
+
+
+def GenerateV14StyleResource(input_filename, output_v14_filename):
+  """Convert API 17 style resources to API 14 compatible style resource.
+
+  Write the generated style resource to output_v14_filename.
+  It's mostly a simple replacement, s/Start/Left s/End/Right,
+  on the attribute names.
+  """
+  dom = ParseAndReportErrors(input_filename)
+  GenerateV14StyleResourceDom(dom, input_filename)
+
+  # Write the generated resource. Unlike the layout variant, the output is
+  # written even when the conversion made no changes.
+  WriteDomToFile(dom, output_v14_filename)
+
+
+def GenerateV14LayoutResourcesInDir(input_dir, output_v14_dir, output_v17_dir):
+  """Convert layout resources to API 14 compatible resources in input_dir."""
+  for input_filename in build_utils.FindInDirectory(input_dir, '*.xml'):
+    # Mirror the relative directory structure of input_dir in both outputs.
+    rel_filename = os.path.relpath(input_filename, input_dir)
+    output_v14_filename = os.path.join(output_v14_dir, rel_filename)
+    output_v17_filename = os.path.join(output_v17_dir, rel_filename)
+    GenerateV14LayoutResource(input_filename, output_v14_filename,
+                              output_v17_filename)
+
+
+def GenerateV14StyleResourcesInDir(input_dir, output_v14_dir):
+  """Convert style resources to API 14 compatible resources in input_dir."""
+  for input_filename in build_utils.FindInDirectory(input_dir, '*.xml'):
+    # Mirror the relative directory structure of input_dir in the output.
+    rel_filename = os.path.relpath(input_filename, input_dir)
+    output_v14_filename = os.path.join(output_v14_dir, rel_filename)
+    GenerateV14StyleResource(input_filename, output_v14_filename)
+
+
+def ParseArgs():
+  """Parses command line options.
+
+  Returns:
+    An options object as from optparse.OptionsParser.parse_args()
+  """
+  parser = optparse.OptionParser()
+  parser.add_option('--res-dir',
+                    help='directory containing resources '
+                         'used to generate v14 compatible resources')
+  parser.add_option('--res-v14-compatibility-dir',
+                    help='output directory into which '
+                         'v14 compatible resources will be generated')
+  parser.add_option('--stamp', help='File to touch on success')
+
+  options, args = parser.parse_args()
+
+  if args:
+    parser.error('No positional arguments should be given.')
+
+  # Check that required options have been provided. --stamp is optional.
+  required_options = ('res_dir', 'res_v14_compatibility_dir')
+  build_utils.CheckOptions(options, parser, required=required_options)
+  return options
+
+def GenerateV14Resources(res_dir, res_v14_dir):
+  """Generates v14-compatible resources into res_v14_dir for every
+  resource subdirectory of res_dir (e.g. layout-v17/, values-land/)."""
+  for name in os.listdir(res_dir):
+    if not os.path.isdir(os.path.join(res_dir, name)):
+      continue
+
+    # Resource dir names look like '<type>[-<qualifier>...]'.
+    dir_pieces = name.split('-')
+    resource_type = dir_pieces[0]
+    qualifiers = dir_pieces[1:]
+
+    # Find the API-level qualifier (e.g. 'v17'), if any.
+    api_level_qualifier_index = -1
+    api_level_qualifier = ''
+    for index, qualifier in enumerate(qualifiers):
+      if re.match('v[0-9]+$', qualifier):
+        api_level_qualifier_index = index
+        api_level_qualifier = qualifier
+        break
+
+    # Android pre-v17 API doesn't support RTL. Skip.
+    if 'ldrtl' in qualifiers:
+      continue
+
+    input_dir = os.path.abspath(os.path.join(res_dir, name))
+
+    # We also need to copy the original v17 resource to *-v17 directory
+    # because the generated v14 resource will hide the original resource.
+    output_v14_dir = os.path.join(res_v14_dir, name)
+    output_v17_dir = os.path.join(res_v14_dir, name + '-v17')
+
+    # We only convert layout resources under layout*/, xml*/,
+    # and style resources under values*/.
+    if resource_type in ('layout', 'xml'):
+      if not api_level_qualifier:
+        GenerateV14LayoutResourcesInDir(input_dir, output_v14_dir,
+                                        output_v17_dir)
+    elif resource_type == 'values':
+      if api_level_qualifier == 'v17':
+        # Drop the 'v17' qualifier from the output dir name so the converted
+        # resources apply to v14 devices.
+        output_qualifiers = qualifiers[:]
+        del output_qualifiers[api_level_qualifier_index]
+        output_v14_dir = os.path.join(res_v14_dir,
+                                      '-'.join([resource_type] +
+                                               output_qualifiers))
+        GenerateV14StyleResourcesInDir(input_dir, output_v14_dir)
+      elif not api_level_qualifier:
+        ErrorIfStyleResourceExistsInDir(input_dir)
+
+def main():
+  """Regenerates the v14 compatibility resources and touches the stamp."""
+  options = ParseArgs()
+
+  res_v14_dir = options.res_v14_compatibility_dir
+
+  # Start from a clean output directory so stale generated files from a
+  # previous run do not survive.
+  build_utils.DeleteDirectory(res_v14_dir)
+  build_utils.MakeDirectory(res_v14_dir)
+
+  GenerateV14Resources(options.res_dir, res_v14_dir)
+
+  if options.stamp:
+    build_utils.Touch(options.stamp)
+
+if __name__ == '__main__':
+  sys.exit(main())
+
diff --git a/build/android/gyp/get_device_configuration.py b/build/android/gyp/get_device_configuration.py
new file mode 100755
index 0000000..0ec08ef
--- /dev/null
+++ b/build/android/gyp/get_device_configuration.py
@@ -0,0 +1,78 @@
+#!/usr/bin/env python
+#
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Gets and writes the configurations of the attached devices.
+
+This configuration is used by later build steps to determine which devices to
+install to and what needs to be installed to those devices.
+"""
+
+import optparse
+import os
+import sys
+
+from util import build_device
+from util import build_utils
+
+BUILD_ANDROID_DIR = os.path.abspath(
+    os.path.join(os.path.dirname(__file__), '..'))
+sys.path.append(BUILD_ANDROID_DIR)
+
+import devil_chromium
+
+
+def main(argv):
+  parser = optparse.OptionParser()
+  parser.add_option('--stamp', action='store')
+  parser.add_option('--output', action='store')
+  parser.add_option('--output-directory', action='store')
+  options, _ = parser.parse_args(argv)
+
+  devil_chromium.Initialize(
+      output_directory=os.path.abspath(options.output_directory))
+
+  devices = build_device.GetAttachedDevices()
+
+  device_configurations = []
+  for d in devices:
+    configuration, is_online, has_root = (
+        build_device.GetConfigurationForDevice(d))
+
+    if not is_online:
+      build_utils.PrintBigWarning(
+          '%s is not online. Skipping managed install for this device. '
+          'Try rebooting the device to fix this warning.' % d)
+      continue
+
+    if not has_root:
+      build_utils.PrintBigWarning(
+          '"adb root" failed on device: %s\n'
+          'Skipping managed install for this device.'
+          % configuration['description'])
+      continue
+
+    device_configurations.append(configuration)
+
+  if len(device_configurations) == 0:
+    build_utils.PrintBigWarning(
+        'No valid devices attached. Skipping managed install steps.')
+  elif len(devices) > 1:
+    # Note that this checks len(devices) and not len(device_configurations).
+    # This way, any time there are multiple devices attached it is
+    # explicitly stated which device we will install things to even if all but
+    # one device were rejected for other reasons (e.g. two devices attached,
+    # one w/o root).
+    build_utils.PrintBigWarning(
+        'Multiple devices attached. '
+        'Installing to the preferred device: '
+        '%(id)s (%(description)s)' % (device_configurations[0]))
+
+
+  build_device.WriteConfigurations(device_configurations, options.output)
+
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv))
diff --git a/build/android/gyp/insert_chromium_version.py b/build/android/gyp/insert_chromium_version.py
new file mode 100755
index 0000000..171f9d4
--- /dev/null
+++ b/build/android/gyp/insert_chromium_version.py
@@ -0,0 +1,66 @@
+#!/usr/bin/env python
+#
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Insert a version string into a library as a section '.chromium.version'.
+"""
+
+import optparse
+import os
+import sys
+import tempfile
+
+from util import build_utils
+
+def InsertChromiumVersion(android_objcopy,
+                          library_path,
+                          version_string):
+  """Embeds version_string into the library at library_path as a
+  '.chromium.version' section, replacing any existing one."""
+  # Remove existing .chromium.version section from .so
+  objcopy_command = [android_objcopy,
+                     '--remove-section=.chromium.version',
+                     library_path]
+  build_utils.CheckOutput(objcopy_command)
+
+  # Add a .chromium.version section.
+  # The temp file only needs to live while objcopy reads it via stream.name;
+  # flush() ensures the content is on disk before objcopy runs.
+  with tempfile.NamedTemporaryFile() as stream:
+    stream.write(version_string)
+    stream.flush()
+    objcopy_command = [android_objcopy,
+                       '--add-section', '.chromium.version=%s' % stream.name,
+                       library_path]
+    build_utils.CheckOutput(objcopy_command)
+
+def main(args):
+  """Inserts the version string into each listed stripped library."""
+  # Expand @FileArg(...)-style arguments before parsing.
+  args = build_utils.ExpandFileArgs(args)
+  parser = optparse.OptionParser()
+
+  parser.add_option('--android-objcopy',
+      help='Path to the toolchain\'s objcopy binary')
+  parser.add_option('--stripped-libraries-dir',
+      help='Directory of native libraries')
+  parser.add_option('--libraries',
+      help='List of libraries')
+  parser.add_option('--version-string',
+      help='Version string to be inserted')
+  parser.add_option('--stamp', help='Path to touch on success')
+
+  options, _ = parser.parse_args(args)
+  libraries = build_utils.ParseGypList(options.libraries)
+
+  for library in libraries:
+    library_path = os.path.join(options.stripped_libraries_dir, library)
+
+    InsertChromiumVersion(options.android_objcopy,
+                          library_path,
+                          options.version_string)
+
+  if options.stamp:
+    build_utils.Touch(options.stamp)
+
+  return 0
+
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv[1:]))
diff --git a/build/android/gyp/jar.py b/build/android/gyp/jar.py
new file mode 100755
index 0000000..cfa5e50
--- /dev/null
+++ b/build/android/gyp/jar.py
@@ -0,0 +1,94 @@
+#!/usr/bin/env python
+#
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import optparse
+import os
+import sys
+
+from util import build_utils
+
+
+# Glob patterns for generated Android resource classes (R, Manifest and
+# their inner classes) that --strip-resource-classes-for excludes.
+# NOTE(review): '##' presumably stands in for the '$' of inner-class file
+# names — confirm against build_utils.MatchesGlob's pattern handling.
+_RESOURCE_CLASSES = [
+    "R.class",
+    "R##*.class",
+    "Manifest.class",
+    "Manifest##*.class",
+]
+
+
+def Jar(class_files, classes_dir, jar_path, manifest_file=None):
+  """Creates a jar at jar_path containing class_files (paths rooted in
+  classes_dir), optionally with the given manifest."""
+  jar_path = os.path.abspath(jar_path)
+
+  # The paths of the files in the jar will be the same as they are passed in to
+  # the command. Because of this, the command should be run in
+  # options.classes_dir so the .class file paths in the jar are correct.
+  jar_cwd = classes_dir
+  class_files_rel = [os.path.relpath(f, jar_cwd) for f in class_files]
+  # 'cf0' = create, to file, store without compression.
+  jar_cmd = ['jar', 'cf0', jar_path]
+  if manifest_file:
+    # 'm' makes jar read the manifest from the next argument.
+    jar_cmd[1] += 'm'
+    jar_cmd.append(os.path.abspath(manifest_file))
+  jar_cmd.extend(class_files_rel)
+
+  if not class_files_rel:
+    # 'jar' refuses to create an empty archive; give it a placeholder entry.
+    empty_file = os.path.join(classes_dir, '.empty')
+    build_utils.Touch(empty_file)
+    jar_cmd.append(os.path.relpath(empty_file, jar_cwd))
+  build_utils.CheckOutput(jar_cmd, cwd=jar_cwd)
+  build_utils.Touch(jar_path, fail_if_missing=True)
+
+
+def JarDirectory(classes_dir, jar_path, manifest_file=None, predicate=None):
+  """Jars all .class files under classes_dir; when predicate is given, only
+  files for which predicate(path) is truthy are kept."""
+  class_files = build_utils.FindInDirectory(classes_dir, '*.class')
+  if predicate:
+    class_files = [f for f in class_files if predicate(f)]
+
+  Jar(class_files, classes_dir, jar_path, manifest_file=manifest_file)
+
+
+def main():
+  parser = optparse.OptionParser()
+  parser.add_option('--classes-dir', help='Directory containing .class files.')
+  parser.add_option('--input-jar', help='Jar to include .class files from')
+  parser.add_option('--jar-path', help='Jar output path.')
+  parser.add_option('--excluded-classes',
+      help='GYP list of .class file patterns to exclude from the jar.')
+  parser.add_option('--strip-resource-classes-for',
+      help='GYP list of java package names exclude R.class files in.')
+  parser.add_option('--stamp', help='Path to touch on success.')
+
+  args = build_utils.ExpandFileArgs(sys.argv[1:])
+  options, _ = parser.parse_args(args)
+  # Current implementation supports just one or the other of these:
+  assert not options.classes_dir or not options.input_jar
+
+  excluded_classes = []
+  if options.excluded_classes:
+    excluded_classes = build_utils.ParseGypList(options.excluded_classes)
+
+  if options.strip_resource_classes_for:
+    packages = build_utils.ParseGypList(options.strip_resource_classes_for)
+    excluded_classes.extend(p.replace('.', '/') + '/' + f
+                            for p in packages for f in _RESOURCE_CLASSES)
+
+  predicate = None
+  if excluded_classes:
+    predicate = lambda f: not build_utils.MatchesGlob(f, excluded_classes)
+
+  with build_utils.TempDir() as temp_dir:
+    classes_dir = options.classes_dir
+    if options.input_jar:
+      build_utils.ExtractAll(options.input_jar, temp_dir)
+      classes_dir = temp_dir
+    JarDirectory(classes_dir, options.jar_path, predicate=predicate)
+
+  if options.stamp:
+    build_utils.Touch(options.stamp)
+
+
+if __name__ == '__main__':
+  sys.exit(main())
+
diff --git a/build/android/gyp/jar_toc.py b/build/android/gyp/jar_toc.py
new file mode 100755
index 0000000..b830956
--- /dev/null
+++ b/build/android/gyp/jar_toc.py
@@ -0,0 +1,127 @@
+#!/usr/bin/env python
+#
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Creates a TOC file from a Java jar.
+
+The TOC file contains the non-package API of the jar. This includes all
+public/protected/package classes/functions/members and the values of static
+final variables (members with package access are kept because in some cases we
+have multiple libraries with the same package, particularly test+non-test). Some
+other information (major/minor javac version) is also included.
+
+This TOC file then can be used to determine if a dependent library should be
+rebuilt when this jar changes. I.e. any change to the jar that would require a
+rebuild, will have a corresponding change in the TOC file.
+"""
+
+import optparse
+import os
+import re
+import sys
+import zipfile
+
+from util import build_utils
+from util import md5_check
+
+
+def GetClassesInZipFile(zip_file):
+  """Returns fully-qualified class names for all .class entries in zip_file."""
+  classes = []
+  files = zip_file.namelist()
+  for f in files:
+    if f.endswith('.class'):
+      # f is of the form org/chromium/base/Class$Inner.class
+      # Convert the path to dotted package form and strip the 6-character
+      # '.class' suffix.
+      classes.append(f.replace('/', '.')[:-6])
+  return classes
+
+
+def CallJavap(classpath, classes):
+  """Runs javap over classes against classpath and returns its stdout."""
+  javap_cmd = [
+      'javap',
+      '-package',  # Show public/protected/package.
+      # -verbose is required to get constant values (which can be inlined in
+      # dependents).
+      '-verbose',
+      # JVM option passed through javap (-J); sizes the young generation.
+      '-J-XX:NewSize=4m',
+      '-classpath', classpath
+      ] + classes
+  return build_utils.CheckOutput(javap_cmd)
+
+
+def ExtractToc(disassembled_classes):
+  """Filters javap output down to the lines that form the jar's TOC
+  (signatures, versions, constant values), dropping everything else."""
+  # javap output is structured by indent (2-space) levels.
+  good_patterns = [
+      '^[^ ]', # This includes all class signatures.
+      '^  SourceFile:',
+      '^  minor version:',
+      '^  major version:',
+      '^  Constant value:',
+      '^  public ',
+      '^  protected ',
+      ]
+  bad_patterns = [
+      '^const #', # Matches the constant pool (i.e. literals used in the class).
+    ]
+
+  def JavapFilter(line):
+    # Keep a line only if it matches a good pattern and no bad pattern.
+    return (re.match('|'.join(good_patterns), line) and
+        not re.match('|'.join(bad_patterns), line))
+  toc = filter(JavapFilter, disassembled_classes.split('\n'))
+
+  return '\n'.join(toc)
+
+
+def UpdateToc(jar_path, toc_path):
+  """Regenerates toc_path from the classes in jar_path (empty TOC for an
+  empty jar)."""
+  classes = GetClassesInZipFile(zipfile.ZipFile(jar_path))
+  toc = ''
+  if len(classes) != 0:
+    javap_output = CallJavap(classpath=jar_path, classes=classes)
+    toc = ExtractToc(javap_output)
+
+  with open(toc_path, 'w') as tocfile:
+    tocfile.write(toc)
+
+
+def DoJarToc(options):
+  """Updates the TOC for options.jar_path, skipping the expensive javap run
+  when the jar is unchanged according to the md5 stamp."""
+  jar_path = options.jar_path
+  toc_path = options.toc_path
+  record_path = '%s.md5.stamp' % toc_path
+  # Force regeneration if the TOC file itself is missing, even when the
+  # stamp says the jar is unchanged.
+  md5_check.CallAndRecordIfStale(
+      lambda: UpdateToc(jar_path, toc_path),
+      record_path=record_path,
+      input_paths=[jar_path],
+      force=not os.path.exists(toc_path),
+      )
+  build_utils.Touch(toc_path, fail_if_missing=True)
+
+
+def main():
+  parser = optparse.OptionParser()
+  build_utils.AddDepfileOption(parser)
+
+  parser.add_option('--jar-path', help='Input .jar path.')
+  parser.add_option('--toc-path', help='Output .jar.TOC path.')
+  parser.add_option('--stamp', help='Path to touch on success.')
+
+  options, _ = parser.parse_args()
+
+  if options.depfile:
+    build_utils.WriteDepfile(
+        options.depfile,
+        build_utils.GetPythonDependencies())
+
+  DoJarToc(options)
+
+  if options.depfile:
+    build_utils.WriteDepfile(
+        options.depfile,
+        build_utils.GetPythonDependencies())
+
+  if options.stamp:
+    build_utils.Touch(options.stamp)
+
+
+if __name__ == '__main__':
+  sys.exit(main())
diff --git a/build/android/gyp/java_cpp_enum.py b/build/android/gyp/java_cpp_enum.py
new file mode 100755
index 0000000..b304930
--- /dev/null
+++ b/build/android/gyp/java_cpp_enum.py
@@ -0,0 +1,369 @@
+#!/usr/bin/env python
+#
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import collections
+from datetime import date
+import re
+import optparse
+import os
+from string import Template
+import sys
+import zipfile
+
+from util import build_utils
+
+# List of C++ types that are compatible with the Java code generated by this
+# script.
+#
+# This script can parse .idl files however, at present it ignores special
+# rules such as [cpp_enum_prefix_override="ax_attr"].
+# NOTE(review): the list appears limited to types no wider than 32 bits —
+# presumably so values fit a Java int; confirm before extending.
+ENUM_FIXED_TYPE_WHITELIST = ['char', 'unsigned char',
+  'short', 'unsigned short',
+  'int', 'int8_t', 'int16_t', 'int32_t', 'uint8_t', 'uint16_t']
+
+class EnumDefinition(object):
+  """A parsed C++ enum destined for Java code generation.
+
+  Holds the enum's name(s), target Java package, fixed underlying type (if
+  any) and an ordered mapping of entry name -> value. Call Finalize() after
+  all entries are appended to validate, assign numeric values and strip the
+  common prefix from entry names.
+  """
+  def __init__(self, original_enum_name=None, class_name_override=None,
+               enum_package=None, entries=None, fixed_type=None):
+    self.original_enum_name = original_enum_name
+    self.class_name_override = class_name_override
+    self.enum_package = enum_package
+    self.entries = collections.OrderedDict(entries or [])
+    self.prefix_to_strip = None
+    self.fixed_type = fixed_type
+
+  def AppendEntry(self, key, value):
+    """Adds an entry; duplicate keys are an error."""
+    if key in self.entries:
+      raise Exception('Multiple definitions of key %s found.' % key)
+    self.entries[key] = value
+
+  @property
+  def class_name(self):
+    # The directive-supplied override wins over the C++ enum name.
+    return self.class_name_override or self.original_enum_name
+
+  def Finalize(self):
+    self._Validate()
+    self._AssignEntryIndices()
+    self._StripPrefix()
+
+  def _Validate(self):
+    assert self.class_name
+    assert self.enum_package
+    assert self.entries
+    if self.fixed_type and self.fixed_type not in ENUM_FIXED_TYPE_WHITELIST:
+      raise Exception('Fixed type %s for enum %s not whitelisted.' %
+          (self.fixed_type, self.class_name))
+
+  def _AssignEntryIndices(self):
+    # Enums, if given no value, are given the value of the previous enum + 1.
+    if not all(self.entries.values()):
+      prev_enum_value = -1
+      for key, value in self.entries.iteritems():
+        if not value:
+          self.entries[key] = prev_enum_value + 1
+        elif value in self.entries:
+          # The value is an alias for a previously defined entry.
+          self.entries[key] = self.entries[value]
+        else:
+          try:
+            self.entries[key] = int(value)
+          except ValueError:
+            raise Exception('Could not interpret integer from enum value "%s" '
+                            'for key %s.' % (value, key))
+        prev_enum_value = self.entries[key]
+
+
+  def _StripPrefix(self):
+    # Derive the prefix from the enum name (CamelCase -> UPPER_SNAKE_) when
+    # no explicit prefix directive was given; only strip if every entry
+    # actually starts with it.
+    prefix_to_strip = self.prefix_to_strip
+    if not prefix_to_strip:
+      prefix_to_strip = self.original_enum_name
+      prefix_to_strip = re.sub('(?!^)([A-Z]+)', r'_\1', prefix_to_strip).upper()
+      prefix_to_strip += '_'
+      if not all([w.startswith(prefix_to_strip) for w in self.entries.keys()]):
+        prefix_to_strip = ''
+
+    entries = collections.OrderedDict()
+    for (k, v) in self.entries.iteritems():
+      stripped_key = k.replace(prefix_to_strip, '', 1)
+      # String values may reference other (prefixed) entries; strip those too.
+      if isinstance(v, basestring):
+        stripped_value = v.replace(prefix_to_strip, '', 1)
+      else:
+        stripped_value = v
+      entries[stripped_key] = stripped_value
+
+    self.entries = entries
+
+class DirectiveSet(object):
+  """Collects GENERATED_JAVA_* directives from header comments and applies
+  them to an EnumDefinition."""
+  class_name_override_key = 'CLASS_NAME_OVERRIDE'
+  enum_package_key = 'ENUM_PACKAGE'
+  prefix_to_strip_key = 'PREFIX_TO_STRIP'
+
+  known_keys = [class_name_override_key, enum_package_key, prefix_to_strip_key]
+
+  def __init__(self):
+    self._directives = {}
+
+  def Update(self, key, value):
+    """Records a directive; unknown directive names are an error."""
+    if key not in DirectiveSet.known_keys:
+      raise Exception("Unknown directive: " + key)
+    self._directives[key] = value
+
+  @property
+  def empty(self):
+    return len(self._directives) == 0
+
+  def UpdateDefinition(self, definition):
+    """Copies the collected directives onto the given EnumDefinition."""
+    definition.class_name_override = self._directives.get(
+        DirectiveSet.class_name_override_key, '')
+    definition.enum_package = self._directives.get(
+        DirectiveSet.enum_package_key)
+    definition.prefix_to_strip = self._directives.get(
+        DirectiveSet.prefix_to_strip_key)
+
+
+class HeaderParser(object):
+  """Line-based parser that extracts EnumDefinitions from C++ header lines.
+
+  Only enums preceded by GENERATED_JAVA_* directive comments are collected;
+  other enums are ignored. Call ParseDefinitions() to run the parser.
+  """
+  single_line_comment_re = re.compile(r'\s*//')
+  multi_line_comment_start_re = re.compile(r'\s*/\*')
+  enum_line_re = re.compile(r'^\s*(\w+)(\s*\=\s*([^,\n]+))?,?')
+  enum_end_re = re.compile(r'^\s*}\s*;\.*$')
+  generator_directive_re = re.compile(
+      r'^\s*//\s+GENERATED_JAVA_(\w+)\s*:\s*([\.\w]+)$')
+  multi_line_generator_directive_start_re = re.compile(
+      r'^\s*//\s+GENERATED_JAVA_(\w+)\s*:\s*\(([\.\w]*)$')
+  multi_line_directive_continuation_re = re.compile(
+      r'^\s*//\s+([\.\w]+)$')
+  multi_line_directive_end_re = re.compile(
+      r'^\s*//\s+([\.\w]*)\)$')
+
+  optional_class_or_struct_re = r'(class|struct)?'
+  enum_name_re = r'(\w+)'
+  optional_fixed_type_re = r'(\:\s*(\w+\s*\w+?))?'
+  enum_start_re = re.compile(r'^\s*(?:\[cpp.*\])?\s*enum\s+' +
+      optional_class_or_struct_re + '\s*' + enum_name_re + '\s*' +
+      optional_fixed_type_re + '\s*{\s*$')
+
+  def __init__(self, lines, path=None):
+    self._lines = lines
+    # path is only used in error messages.
+    self._path = path
+    self._enum_definitions = []
+    self._in_enum = False
+    self._current_definition = None
+    self._generator_directives = DirectiveSet()
+    # (name, [value parts]) while inside a multi-line directive, else None.
+    self._multi_line_generator_directive = None
+
+  def _ApplyGeneratorDirectives(self):
+    self._generator_directives.UpdateDefinition(self._current_definition)
+    # Reset so directives do not leak into the next enum.
+    self._generator_directives = DirectiveSet()
+
+  def ParseDefinitions(self):
+    for line in self._lines:
+      self._ParseLine(line)
+    return self._enum_definitions
+
+  def _ParseLine(self, line):
+    # Dispatch on parser state: mid-directive, outside an enum, inside one.
+    if self._multi_line_generator_directive:
+      self._ParseMultiLineDirectiveLine(line)
+    elif not self._in_enum:
+      self._ParseRegularLine(line)
+    else:
+      self._ParseEnumLine(line)
+
+  def _ParseEnumLine(self, line):
+    if HeaderParser.single_line_comment_re.match(line):
+      return
+    if HeaderParser.multi_line_comment_start_re.match(line):
+      raise Exception('Multi-line comments in enums are not supported in ' +
+                      self._path)
+    enum_end = HeaderParser.enum_end_re.match(line)
+    enum_entry = HeaderParser.enum_line_re.match(line)
+    if enum_end:
+      self._ApplyGeneratorDirectives()
+      self._current_definition.Finalize()
+      self._enum_definitions.append(self._current_definition)
+      self._in_enum = False
+    elif enum_entry:
+      enum_key = enum_entry.groups()[0]
+      # Group 2 is the optional '= value' part; None when absent.
+      enum_value = enum_entry.groups()[2]
+      self._current_definition.AppendEntry(enum_key, enum_value)
+
+  def _ParseMultiLineDirectiveLine(self, line):
+    multi_line_directive_continuation = (
+        HeaderParser.multi_line_directive_continuation_re.match(line))
+    multi_line_directive_end = (
+        HeaderParser.multi_line_directive_end_re.match(line))
+
+    if multi_line_directive_continuation:
+      value_cont = multi_line_directive_continuation.groups()[0]
+      self._multi_line_generator_directive[1].append(value_cont)
+    elif multi_line_directive_end:
+      # Join the accumulated parts plus the final fragment into one value.
+      directive_name = self._multi_line_generator_directive[0]
+      directive_value = "".join(self._multi_line_generator_directive[1])
+      directive_value += multi_line_directive_end.groups()[0]
+      self._multi_line_generator_directive = None
+      self._generator_directives.Update(directive_name, directive_value)
+    else:
+      raise Exception('Malformed multi-line directive declaration in ' +
+                      self._path)
+
+  def _ParseRegularLine(self, line):
+    enum_start = HeaderParser.enum_start_re.match(line)
+    generator_directive = HeaderParser.generator_directive_re.match(line)
+    multi_line_generator_directive_start = (
+        HeaderParser.multi_line_generator_directive_start_re.match(line))
+
+    if generator_directive:
+      directive_name = generator_directive.groups()[0]
+      directive_value = generator_directive.groups()[1]
+      self._generator_directives.Update(directive_name, directive_value)
+    elif multi_line_generator_directive_start:
+      directive_name = multi_line_generator_directive_start.groups()[0]
+      directive_value = multi_line_generator_directive_start.groups()[1]
+      self._multi_line_generator_directive = (directive_name, [directive_value])
+    elif enum_start:
+      # Enums without preceding directives are not meant for generation.
+      if self._generator_directives.empty:
+        return
+      self._current_definition = EnumDefinition(
+          original_enum_name=enum_start.groups()[1],
+          fixed_type=enum_start.groups()[3])
+      self._in_enum = True
+
def GetScriptName():
  """Returns the file name of the running script, without its directory."""
  script_path = os.path.abspath(sys.argv[0])
  return os.path.basename(script_path)
+
def DoGenerate(source_paths):
  """Yields (output_path, java_source) for every enum found in the headers.

  Raises an Exception for any header that contains no annotated enums,
  since that almost always means a missing GENERATED_JAVA_ENUM_PACKAGE
  directive.
  """
  for source_path in source_paths:
    definitions = DoParseHeaderFile(source_path)
    if not definitions:
      raise Exception('No enums found in %s\n'
                      'Did you forget prefixing enums with '
                      '"// GENERATED_JAVA_ENUM_PACKAGE: foo"?' %
                      source_path)
    for definition in definitions:
      package_dir = definition.enum_package.replace('.', os.path.sep)
      java_file_name = definition.class_name + '.java'
      yield (os.path.join(package_dir, java_file_name),
             GenerateOutput(source_path, definition))
+
+
def DoParseHeaderFile(path):
  """Parses the C++ header at |path| and returns its enum definitions."""
  with open(path) as header_file:
    parser = HeaderParser(header_file.readlines(), path)
  return parser.ParseDefinitions()
+
+
def GenerateOutput(source_path, enum_definition):
  """Renders the Java source for one generated enum class.

  Args:
    source_path: Path of the C++ header the enum was parsed from; used only
        in the autogenerated-by banner.
    enum_definition: EnumDefinition carrying enum_package, class_name and an
        ordered mapping of entry names to values.

  Returns:
    The complete Java source text as a string.
  """
  template = Template("""
// Copyright ${YEAR} The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

// This file is autogenerated by
//     ${SCRIPT_NAME}
// From
//     ${SOURCE_PATH}

package ${PACKAGE};

public class ${CLASS_NAME} {
${ENUM_ENTRIES}
}
""")

  enum_template = Template('  public static final int ${NAME} = ${VALUE};')
  enum_entries_string = []
  # Use items() rather than the Python-2-only iteritems(): identical behavior
  # under Python 2 and keeps this script runnable under Python 3. entries is
  # an OrderedDict, so emission order is preserved either way.
  for enum_name, enum_value in enum_definition.entries.items():
    values = {
        'NAME': enum_name,
        'VALUE': enum_value,
    }
    enum_entries_string.append(enum_template.substitute(values))
  enum_entries_string = '\n'.join(enum_entries_string)

  values = {
      'CLASS_NAME': enum_definition.class_name,
      'ENUM_ENTRIES': enum_entries_string,
      'PACKAGE': enum_definition.enum_package,
      'SCRIPT_NAME': GetScriptName(),
      'SOURCE_PATH': source_path,
      'YEAR': str(date.today().year)
  }
  return template.substitute(values)
+
+
def AssertFilesList(output_paths, assert_files_list):
  """Raises unless |assert_files_list| names exactly the produced files."""
  actual = set(output_paths)
  expected = set(assert_files_list)
  if actual == expected:
    return
  # Report the difference from the asserted list's point of view.
  need_to_add = list(actual - expected)
  need_to_remove = list(expected - actual)
  raise Exception('Output files list does not match expectations. Please '
                  'add %s and remove %s.' % (need_to_add, need_to_remove))
+
def DoMain(argv):
  """Command-line entry point: generates Java enum sources from C++ headers.

  Writes either a single .srcjar (--srcjar mode) or individual .java files
  under an output directory (legacy GYP mode). In the legacy mode, returns a
  space-separated list of the written paths for GYP to consume.
  """
  usage = 'usage: %prog [options] [output_dir] input_file(s)...'
  parser = optparse.OptionParser(usage=usage)
  build_utils.AddDepfileOption(parser)

  parser.add_option('--assert_file', action="append", default=[],
                    dest="assert_files_list", help='Assert that the given '
                    'file is an output. There can be multiple occurrences of '
                    'this flag.')
  parser.add_option('--srcjar',
                    help='When specified, a .srcjar at the given path is '
                    'created instead of individual .java files.')
  parser.add_option('--print_output_only', help='Only print output paths.',
                    action='store_true')
  parser.add_option('--verbose', help='Print more information.',
                    action='store_true')

  options, args = parser.parse_args(argv)

  # In srcjar mode all positional args are inputs; otherwise the first arg
  # is the output directory and the rest are inputs.
  if options.srcjar:
    if not args:
      parser.error('Need to specify at least one input file')
    input_paths = args
  else:
    if len(args) < 2:
      parser.error(
          'Need to specify output directory and at least one input file')
    output_dir = args[0]
    input_paths = args[1:]

  if options.depfile:
    # Record both the headers and this script's own Python dependencies so
    # the build re-runs generation when any of them change.
    python_deps = build_utils.GetPythonDependencies()
    build_utils.WriteDepfile(options.depfile, input_paths + python_deps)

  if options.srcjar:
    # These flags only make sense for the per-file output mode.
    if options.print_output_only:
      parser.error('--print_output_only does not work with --srcjar')
    if options.assert_files_list:
      parser.error('--assert_file does not work with --srcjar')

    with zipfile.ZipFile(options.srcjar, 'w', zipfile.ZIP_STORED) as srcjar:
      for output_path, data in DoGenerate(input_paths):
        build_utils.AddToZipHermetic(srcjar, output_path, data=data)
  else:
    # TODO(agrieve): Delete this non-srcjar branch once GYP is gone.
    output_paths = []
    for output_path, data in DoGenerate(input_paths):
      full_path = os.path.join(output_dir, output_path)
      output_paths.append(full_path)
      if not options.print_output_only:
        build_utils.MakeDirectory(os.path.dirname(full_path))
        with open(full_path, 'w') as out_file:
          out_file.write(data)

    if options.assert_files_list:
      AssertFilesList(output_paths, options.assert_files_list)

    if options.verbose:
      print 'Output paths:'
      print '\n'.join(output_paths)

    # Used by GYP.
    return ' '.join(output_paths)
+
+
if __name__ == '__main__':
  # Strip the program name; DoMain expects only the real arguments.
  DoMain(sys.argv[1:])
diff --git a/build/android/gyp/java_cpp_enum_tests.py b/build/android/gyp/java_cpp_enum_tests.py
new file mode 100755
index 0000000..902bbfa
--- /dev/null
+++ b/build/android/gyp/java_cpp_enum_tests.py
@@ -0,0 +1,438 @@
+#!/usr/bin/env python
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Tests for enum_preprocess.py.
+
+This test suite containss various tests for the C++ -> Java enum generator.
+"""
+
+import collections
+from datetime import date
+import optparse
+import os
+import sys
+import unittest
+
+import java_cpp_enum
+from java_cpp_enum import EnumDefinition, GenerateOutput, GetScriptName
+from java_cpp_enum import HeaderParser
+
+sys.path.append(os.path.join(os.path.dirname(__file__), "gyp"))
+from util import build_utils
+
class TestPreprocess(unittest.TestCase):
  """Unit tests for the C++ -> Java enum generator.

  Covers the Java-source rendering, the header parser (directives, enum
  variants, multi-line directives), and EnumDefinition's value assignment
  and prefix-stripping logic.
  """

  def testOutput(self):
    # Renders a definition with a literal and an expression value and checks
    # the generated Java source verbatim.
    definition = EnumDefinition(original_enum_name='ClassName',
                                enum_package='some.package',
                                entries=[('E1', 1), ('E2', '2 << 2')])
    output = GenerateOutput('path/to/file', definition)
    expected = """
// Copyright %d The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

// This file is autogenerated by
//     %s
// From
//     path/to/file

package some.package;

public class ClassName {
  public static final int E1 = 1;
  public static final int E2 = 2 << 2;
}
"""
    self.assertEqual(expected % (date.today().year, GetScriptName()), output)

  def testParseSimpleEnum(self):
    test_data = """
      // GENERATED_JAVA_ENUM_PACKAGE: test.namespace
      enum EnumName {
        VALUE_ZERO,
        VALUE_ONE,
      };
    """.split('\n')
    definitions = HeaderParser(test_data).ParseDefinitions()
    self.assertEqual(1, len(definitions))
    definition = definitions[0]
    self.assertEqual('EnumName', definition.class_name)
    self.assertEqual('test.namespace', definition.enum_package)
    self.assertEqual(collections.OrderedDict([('VALUE_ZERO', 0),
                                              ('VALUE_ONE', 1)]),
                     definition.entries)

  def testParseBitShifts(self):
    # Expression values must be carried through as text, not evaluated.
    test_data = """
      // GENERATED_JAVA_ENUM_PACKAGE: test.namespace
      enum EnumName {
        VALUE_ZERO = 1 << 0,
        VALUE_ONE = 1 << 1,
      };
    """.split('\n')
    definitions = HeaderParser(test_data).ParseDefinitions()
    self.assertEqual(1, len(definitions))
    definition = definitions[0]
    self.assertEqual('EnumName', definition.class_name)
    self.assertEqual('test.namespace', definition.enum_package)
    self.assertEqual(collections.OrderedDict([('VALUE_ZERO', '1 << 0'),
                                              ('VALUE_ONE', '1 << 1')]),
                     definition.entries)

  def testParseClassNameOverride(self):
    test_data = """
      // GENERATED_JAVA_ENUM_PACKAGE: test.namespace
      // GENERATED_JAVA_CLASS_NAME_OVERRIDE: OverrideName
      enum EnumName {
        FOO
      };

      // GENERATED_JAVA_ENUM_PACKAGE: test.namespace
      // GENERATED_JAVA_CLASS_NAME_OVERRIDE: OtherOverride
      enum PrefixTest {
        PREFIX_TEST_A,
        PREFIX_TEST_B,
      };
    """.split('\n')
    definitions = HeaderParser(test_data).ParseDefinitions()
    self.assertEqual(2, len(definitions))
    definition = definitions[0]
    self.assertEqual('OverrideName', definition.class_name)

    definition = definitions[1]
    self.assertEqual('OtherOverride', definition.class_name)
    self.assertEqual(collections.OrderedDict([('A', 0),
                                              ('B', 1)]),
                     definition.entries)

  def testParseTwoEnums(self):
    # The middle enum has no directives and must be skipped entirely.
    test_data = """
      // GENERATED_JAVA_ENUM_PACKAGE: test.namespace
      enum EnumOne {
        ENUM_ONE_A = 1,
        // Comment there
        ENUM_ONE_B = A,
      };

      enum EnumIgnore {
        C, D, E
      };

      // GENERATED_JAVA_ENUM_PACKAGE: other.package
      // GENERATED_JAVA_PREFIX_TO_STRIP: P_
      enum EnumTwo {
        P_A,
        P_B
      };
    """.split('\n')
    definitions = HeaderParser(test_data).ParseDefinitions()
    self.assertEqual(2, len(definitions))
    definition = definitions[0]
    self.assertEqual('EnumOne', definition.class_name)
    self.assertEqual('test.namespace', definition.enum_package)
    self.assertEqual(collections.OrderedDict([('A', '1'),
                                              ('B', 'A')]),
                     definition.entries)

    definition = definitions[1]
    self.assertEqual('EnumTwo', definition.class_name)
    self.assertEqual('other.package', definition.enum_package)
    self.assertEqual(collections.OrderedDict([('A', 0),
                                              ('B', 1)]),
                     definition.entries)

  def testParseThrowsOnUnknownDirective(self):
    test_data = """
      // GENERATED_JAVA_UNKNOWN: Value
      enum EnumName {
        VALUE_ONE,
      };
    """.split('\n')
    with self.assertRaises(Exception):
      HeaderParser(test_data).ParseDefinitions()

  def testParseReturnsEmptyListWithoutDirectives(self):
    test_data = """
      enum EnumName {
        VALUE_ONE,
      };
    """.split('\n')
    self.assertEqual([], HeaderParser(test_data).ParseDefinitions())

  def testParseEnumClass(self):
    test_data = """
      // GENERATED_JAVA_ENUM_PACKAGE: test.namespace
      enum class Foo {
        FOO_A,
      };
    """.split('\n')
    definitions = HeaderParser(test_data).ParseDefinitions()
    self.assertEqual(1, len(definitions))
    definition = definitions[0]
    self.assertEqual('Foo', definition.class_name)
    self.assertEqual('test.namespace', definition.enum_package)
    self.assertEqual(collections.OrderedDict([('A', 0)]),
                     definition.entries)

  def testParseEnumStruct(self):
    test_data = """
      // GENERATED_JAVA_ENUM_PACKAGE: test.namespace
      enum struct Foo {
        FOO_A,
      };
    """.split('\n')
    definitions = HeaderParser(test_data).ParseDefinitions()
    self.assertEqual(1, len(definitions))
    definition = definitions[0]
    self.assertEqual('Foo', definition.class_name)
    self.assertEqual('test.namespace', definition.enum_package)
    self.assertEqual(collections.OrderedDict([('A', 0)]),
                     definition.entries)

  def testParseFixedTypeEnum(self):
    test_data = """
      // GENERATED_JAVA_ENUM_PACKAGE: test.namespace
      enum Foo : int {
        FOO_A,
      };
    """.split('\n')
    definitions = HeaderParser(test_data).ParseDefinitions()
    self.assertEqual(1, len(definitions))
    definition = definitions[0]
    self.assertEqual('Foo', definition.class_name)
    self.assertEqual('test.namespace', definition.enum_package)
    self.assertEqual('int', definition.fixed_type)
    self.assertEqual(collections.OrderedDict([('A', 0)]),
                     definition.entries)

  def testParseFixedTypeEnumClass(self):
    # Two-word fixed types ("unsigned short") must be captured whole.
    test_data = """
      // GENERATED_JAVA_ENUM_PACKAGE: test.namespace
      enum class Foo: unsigned short {
        FOO_A,
      };
    """.split('\n')
    definitions = HeaderParser(test_data).ParseDefinitions()
    self.assertEqual(1, len(definitions))
    definition = definitions[0]
    self.assertEqual('Foo', definition.class_name)
    self.assertEqual('test.namespace', definition.enum_package)
    self.assertEqual('unsigned short', definition.fixed_type)
    self.assertEqual(collections.OrderedDict([('A', 0)]),
                     definition.entries)

  def testParseUnknownFixedTypeRaises(self):
    test_data = """
      // GENERATED_JAVA_ENUM_PACKAGE: test.namespace
      enum class Foo: foo_type {
        FOO_A,
      };
    """.split('\n')
    with self.assertRaises(Exception):
      HeaderParser(test_data).ParseDefinitions()

  def testParseSimpleMultiLineDirective(self):
    test_data = """
      // GENERATED_JAVA_ENUM_PACKAGE: (
      //   test.namespace)
      // GENERATED_JAVA_CLASS_NAME_OVERRIDE: Bar
      enum Foo {
        FOO_A,
      };
    """.split('\n')
    definitions = HeaderParser(test_data).ParseDefinitions()
    self.assertEqual('test.namespace', definitions[0].enum_package)
    self.assertEqual('Bar', definitions[0].class_name)

  def testParseMultiLineDirective(self):
    # Continuation fragments are concatenated without separators.
    test_data = """
      // GENERATED_JAVA_ENUM_PACKAGE: (te
      //   st.name
      //   space)
      enum Foo {
        FOO_A,
      };
    """.split('\n')
    definitions = HeaderParser(test_data).ParseDefinitions()
    self.assertEqual('test.namespace', definitions[0].enum_package)

  def testParseMultiLineDirectiveWithOtherDirective(self):
    test_data = """
      // GENERATED_JAVA_ENUM_PACKAGE: (
      //   test.namespace)
      // GENERATED_JAVA_CLASS_NAME_OVERRIDE: (
      //   Ba
      //   r
      //   )
      enum Foo {
        FOO_A,
      };
    """.split('\n')
    definitions = HeaderParser(test_data).ParseDefinitions()
    self.assertEqual('test.namespace', definitions[0].enum_package)
    self.assertEqual('Bar', definitions[0].class_name)

  def testParseMalformedMultiLineDirectiveWithOtherDirective(self):
    # The directive is never closed with ")", so parsing must fail.
    test_data = """
      // GENERATED_JAVA_ENUM_PACKAGE: (
      //   test.name
      //   space
      // GENERATED_JAVA_CLASS_NAME_OVERRIDE: Bar
      enum Foo {
        FOO_A,
      };
    """.split('\n')
    with self.assertRaises(Exception):
      HeaderParser(test_data).ParseDefinitions()

  def testParseMalformedMultiLineDirective(self):
    test_data = """
      // GENERATED_JAVA_ENUM_PACKAGE: (
      //   test.name
      //   space
      enum Foo {
        FOO_A,
      };
    """.split('\n')
    with self.assertRaises(Exception):
      HeaderParser(test_data).ParseDefinitions()

  def testParseMalformedMultiLineDirectiveShort(self):
    test_data = """
      // GENERATED_JAVA_ENUM_PACKAGE: (
      enum Foo {
        FOO_A,
      };
    """.split('\n')
    with self.assertRaises(Exception):
      HeaderParser(test_data).ParseDefinitions()

  def testEnumValueAssignmentNoneDefined(self):
    # Entries with no explicit value are numbered sequentially from 0.
    definition = EnumDefinition(original_enum_name='c', enum_package='p')
    definition.AppendEntry('A', None)
    definition.AppendEntry('B', None)
    definition.AppendEntry('C', None)
    definition.Finalize()
    self.assertEqual(collections.OrderedDict([('A', 0),
                                              ('B', 1),
                                              ('C', 2)]),
                     definition.entries)

  def testEnumValueAssignmentAllDefined(self):
    definition = EnumDefinition(original_enum_name='c', enum_package='p')
    definition.AppendEntry('A', '1')
    definition.AppendEntry('B', '2')
    definition.AppendEntry('C', '3')
    definition.Finalize()
    self.assertEqual(collections.OrderedDict([('A', '1'),
                                              ('B', '2'),
                                              ('C', '3')]),
                     definition.entries)

  def testEnumValueAssignmentReferences(self):
    # A value naming an earlier entry resolves to that entry's value.
    definition = EnumDefinition(original_enum_name='c', enum_package='p')
    definition.AppendEntry('A', None)
    definition.AppendEntry('B', 'A')
    definition.AppendEntry('C', None)
    definition.AppendEntry('D', 'C')
    definition.Finalize()
    self.assertEqual(collections.OrderedDict([('A', 0),
                                              ('B', 0),
                                              ('C', 1),
                                              ('D', 1)]),
                     definition.entries)

  def testEnumValueAssignmentSet(self):
    # Implicit numbering resumes from the last explicit value.
    definition = EnumDefinition(original_enum_name='c', enum_package='p')
    definition.AppendEntry('A', None)
    definition.AppendEntry('B', '2')
    definition.AppendEntry('C', None)
    definition.Finalize()
    self.assertEqual(collections.OrderedDict([('A', 0),
                                              ('B', 2),
                                              ('C', 3)]),
                     definition.entries)

  def testEnumValueAssignmentSetReferences(self):
    definition = EnumDefinition(original_enum_name='c', enum_package='p')
    definition.AppendEntry('A', None)
    definition.AppendEntry('B', 'A')
    definition.AppendEntry('C', 'B')
    definition.AppendEntry('D', None)
    definition.Finalize()
    self.assertEqual(collections.OrderedDict([('A', 0),
                                              ('B', 0),
                                              ('C', 0),
                                              ('D', 1)]),
                     definition.entries)

  def testEnumValueAssignmentRaises(self):
    # A value that is neither numeric nor a known entry name is an error.
    definition = EnumDefinition(original_enum_name='c', enum_package='p')
    definition.AppendEntry('A', None)
    definition.AppendEntry('B', 'foo')
    definition.AppendEntry('C', None)
    with self.assertRaises(Exception):
      definition.Finalize()

  def testExplicitPrefixStripping(self):
    definition = EnumDefinition(original_enum_name='c', enum_package='p')
    definition.AppendEntry('P_A', None)
    definition.AppendEntry('B', None)
    definition.AppendEntry('P_C', None)
    definition.AppendEntry('P_LAST', 'P_C')
    definition.prefix_to_strip = 'P_'
    definition.Finalize()
    self.assertEqual(collections.OrderedDict([('A', 0),
                                              ('B', 1),
                                              ('C', 2),
                                              ('LAST', 2)]),
                     definition.entries)

  def testImplicitPrefixStripping(self):
    # The CLASS_NAME_ prefix derived from the enum name is stripped.
    definition = EnumDefinition(original_enum_name='ClassName',
                                enum_package='p')
    definition.AppendEntry('CLASS_NAME_A', None)
    definition.AppendEntry('CLASS_NAME_B', None)
    definition.AppendEntry('CLASS_NAME_C', None)
    definition.AppendEntry('CLASS_NAME_LAST', 'CLASS_NAME_C')
    definition.Finalize()
    self.assertEqual(collections.OrderedDict([('A', 0),
                                              ('B', 1),
                                              ('C', 2),
                                              ('LAST', 2)]),
                     definition.entries)

  def testImplicitPrefixStrippingRequiresAllConstantsToBePrefixed(self):
    definition = EnumDefinition(original_enum_name='Name',
                                enum_package='p')
    definition.AppendEntry('A', None)
    definition.AppendEntry('B', None)
    definition.AppendEntry('NAME_LAST', None)
    definition.Finalize()
    self.assertEqual(['A', 'B', 'NAME_LAST'], definition.entries.keys())

  def testGenerateThrowsOnEmptyInput(self):
    # Monkey-patch the parser to return nothing, then restore it.
    with self.assertRaises(Exception):
      original_do_parse = java_cpp_enum.DoParseHeaderFile
      try:
        java_cpp_enum.DoParseHeaderFile = lambda _: []
        for _ in java_cpp_enum.DoGenerate(['file']):
          pass
      finally:
        java_cpp_enum.DoParseHeaderFile = original_do_parse
+
def main(argv):
  """Runs the enum generator tests, then touches --stamp on completion."""
  option_parser = optparse.OptionParser()
  option_parser.add_option("--stamp", help="File to touch on success.")
  parsed_options, _ = option_parser.parse_args(argv)

  test_suite = unittest.TestLoader().loadTestsFromTestCase(TestPreprocess)
  unittest.TextTestRunner(verbosity=0).run(test_suite)

  if parsed_options.stamp:
    build_utils.Touch(parsed_options.stamp)
+
if __name__ == '__main__':
  # Strip the program name; main expects only the real arguments.
  main(sys.argv[1:])
diff --git a/build/android/gyp/java_google_api_keys.py b/build/android/gyp/java_google_api_keys.py
new file mode 100755
index 0000000..95cb416
--- /dev/null
+++ b/build/android/gyp/java_google_api_keys.py
@@ -0,0 +1,129 @@
+#!/usr/bin/env python
+#
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Generates a Java file with API keys.
+
+import argparse
+import os
+import string
+import sys
+import zipfile
+
+from util import build_utils
+
+sys.path.append(
+    os.path.abspath(os.path.join(sys.path[0], '../../../google_apis')))
+import google_api_keys
+
+sys.path.append(os.path.abspath(os.path.join(
+    os.path.dirname(__file__), os.pardir)))
+from pylib.constants import host_paths
+
+
# Java package and class name of the generated GoogleAPIKeys source.
PACKAGE = 'org.chromium.chrome'
CLASSNAME = 'GoogleAPIKeys'
+
+
def GetScriptName():
  """Returns this script's path relative to the source root (for banners)."""
  return os.path.relpath(__file__, host_paths.DIR_SOURCE_ROOT)
+
+
def GenerateOutput(constant_definitions):
  """Renders the GoogleAPIKeys Java class source.

  Args:
    constant_definitions: Dict mapping constant names to their string values.

  Returns:
    The complete Java source text as a string.
  """
  template = string.Template("""
// Copyright 2015 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

// This file is autogenerated by
//     ${SCRIPT_NAME}
// From
//     ${SOURCE_PATH}

package ${PACKAGE};

public class ${CLASS_NAME} {
${CONSTANT_ENTRIES}
}
""")

  constant_template = string.Template(
      '  public static final String ${NAME} = "${VALUE}";')
  constant_entries_list = []
  # Use items() rather than the Python-2-only iteritems(): identical behavior
  # under Python 2 and keeps this script runnable under Python 3.
  for constant_name, constant_value in constant_definitions.items():
    values = {
        'NAME': constant_name,
        'VALUE': constant_value,
    }
    constant_entries_list.append(constant_template.substitute(values))
  constant_entries_string = '\n'.join(constant_entries_list)

  values = {
      'CLASS_NAME': CLASSNAME,
      'CONSTANT_ENTRIES': constant_entries_string,
      'PACKAGE': PACKAGE,
      'SCRIPT_NAME': GetScriptName(),
      'SOURCE_PATH': 'google_api_keys/google_api_keys.h',
  }
  return template.substitute(values)
+
+
def _DoWriteJavaOutput(output_path, constant_definition):
  """Writes the generated Java source to |output_path|, creating dirs."""
  out_dir = os.path.dirname(output_path)
  if out_dir and not os.path.exists(out_dir):
    os.makedirs(out_dir)
  java_source = GenerateOutput(constant_definition)
  with open(output_path, 'w') as out_file:
    out_file.write(java_source)
+
+
def _DoWriteJarOutput(output_path, constant_definition):
  """Writes the generated Java source into a srcjar at |output_path|."""
  out_dir = os.path.dirname(output_path)
  if out_dir and not os.path.exists(out_dir):
    os.makedirs(out_dir)
  with zipfile.ZipFile(output_path, 'w') as srcjar:
    # Place the file at its package-derived path inside the jar.
    entry_path = '%s/%s' % (PACKAGE.replace('.', '/'), CLASSNAME + '.java')
    java_source = GenerateOutput(constant_definition)
    build_utils.AddToZipHermetic(srcjar, entry_path, data=java_source)
+
+
def _DoMain(argv):
  """Parses arguments and emits the API-keys class and/or srcjar."""
  parser = argparse.ArgumentParser()
  parser.add_argument("--out", help="Path for java output.")
  parser.add_argument("--srcjar", help="Path for srcjar output.")
  options = parser.parse_args(argv)
  # At least one output must be requested.
  if not options.out and not options.srcjar:
    parser.print_help()
    sys.exit(-1)

  # Constant name -> value, all resolved through the google_api_keys module.
  values = {
      'GOOGLE_API_KEY': google_api_keys.GetAPIKey(),
      'GOOGLE_API_KEY_REMOTING': google_api_keys.GetAPIKeyRemoting(),
      'GOOGLE_API_KEY_PHYSICAL_WEB_TEST':
          google_api_keys.GetAPIKeyPhysicalWebTest(),
      'GOOGLE_CLIENT_ID_MAIN': google_api_keys.GetClientID('MAIN'),
      'GOOGLE_CLIENT_SECRET_MAIN': google_api_keys.GetClientSecret('MAIN'),
      'GOOGLE_CLIENT_ID_CLOUD_PRINT':
          google_api_keys.GetClientID('CLOUD_PRINT'),
      'GOOGLE_CLIENT_SECRET_CLOUD_PRINT':
          google_api_keys.GetClientSecret('CLOUD_PRINT'),
      'GOOGLE_CLIENT_ID_REMOTING': google_api_keys.GetClientID('REMOTING'),
      'GOOGLE_CLIENT_SECRET_REMOTING':
          google_api_keys.GetClientSecret('REMOTING'),
      'GOOGLE_CLIENT_ID_REMOTING_HOST':
          google_api_keys.GetClientID('REMOTING_HOST'),
      'GOOGLE_CLIENT_SECRET_REMOTING_HOST':
          google_api_keys.GetClientSecret('REMOTING_HOST'),
      'GOOGLE_CLIENT_ID_REMOTING_IDENTITY_API':
          google_api_keys.GetClientID('REMOTING_IDENTITY_API'),
  }

  if options.out:
    _DoWriteJavaOutput(options.out, values)
  if options.srcjar:
    _DoWriteJarOutput(options.srcjar, values)
+
+
if __name__ == '__main__':
  # Strip the program name; _DoMain expects only the real arguments.
  _DoMain(sys.argv[1:])
+
diff --git a/build/android/gyp/java_google_api_keys_tests.py b/build/android/gyp/java_google_api_keys_tests.py
new file mode 100755
index 0000000..eb24ea4
--- /dev/null
+++ b/build/android/gyp/java_google_api_keys_tests.py
@@ -0,0 +1,61 @@
+#!/usr/bin/env python
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Tests for java_google_api_keys.py.
+
+This test suite contains various tests for the C++ -> Java Google API Keys
+generator.
+"""
+
+import collections
+import argparse
+import os
+import sys
+import unittest
+
+import java_google_api_keys
+
+sys.path.append(os.path.join(os.path.dirname(__file__), "gyp"))
+from util import build_utils
+
+
class TestJavaGoogleAPIKeys(unittest.TestCase):
  """Tests for the GoogleAPIKeys Java class generator."""

  def testOutput(self):
    # Generate from a two-entry dict and compare the full Java source.
    api_key_values = {'E1': 'abc', 'E2': 'defgh'}
    generated = java_google_api_keys.GenerateOutput(api_key_values)
    expected = """
// Copyright 2015 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

// This file is autogenerated by
//     %s
// From
//     google_api_keys/google_api_keys.h

package org.chromium.chrome;

public class GoogleAPIKeys {
  public static final String E1 = "abc";
  public static final String E2 = "defgh";
}
"""
    self.assertEqual(expected % java_google_api_keys.GetScriptName(),
                     generated)
+
+
def main(argv):
  """Runs the API-keys generator tests, then touches --stamp on completion."""
  arg_parser = argparse.ArgumentParser()
  arg_parser.add_argument("--stamp", help="File to touch on success.")
  parsed = arg_parser.parse_args(argv)

  test_suite = unittest.TestLoader().loadTestsFromTestCase(
      TestJavaGoogleAPIKeys)
  unittest.TextTestRunner(verbosity=0).run(test_suite)

  if parsed.stamp:
    build_utils.Touch(parsed.stamp)
+
if __name__ == '__main__':
  # Strip the program name; main expects only the real arguments.
  main(sys.argv[1:])
+
diff --git a/build/android/gyp/javac.py b/build/android/gyp/javac.py
new file mode 100755
index 0000000..5722fb1
--- /dev/null
+++ b/build/android/gyp/javac.py
@@ -0,0 +1,393 @@
+#!/usr/bin/env python
+#
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import optparse
+import os
+import shutil
+import re
+import sys
+import textwrap
+
+from util import build_utils
+from util import md5_check
+
+import jar
+
+sys.path.append(build_utils.COLORAMA_ROOT)
+import colorama
+
+
+def ColorJavacOutput(output):
+  """Returns |output| with javac diagnostics ANSI-colorized.
+
+  "file.java:NN: warning: ..." lines get dim yellow, other
+  "file.java:NN: ..." lines get bright magenta, and bare "^" marker lines
+  get bright blue; all other lines pass through unchanged.
+  """
+  fileline_prefix = r'(?P<fileline>(?P<file>[-.\w/\\]+.java):(?P<line>[0-9]+):)'
+  warning_re = re.compile(
+      fileline_prefix + r'(?P<full_message> warning: (?P<message>.*))$')
+  error_re = re.compile(
+      fileline_prefix + r'(?P<full_message> (?P<message>.*))$')
+  marker_re = re.compile(r'\s*(?P<marker>\^)\s*$')
+
+  # Each color spec is [named regex group to colorize, ANSI prefix to apply].
+  warning_color = ['full_message', colorama.Fore.YELLOW + colorama.Style.DIM]
+  error_color = ['full_message', colorama.Fore.MAGENTA + colorama.Style.BRIGHT]
+  marker_color = ['marker',  colorama.Fore.BLUE + colorama.Style.BRIGHT]
+
+  def Colorize(line, regex, color):
+    # Wraps only the named group's span in the color, resetting afterwards.
+    match = regex.match(line)
+    start = match.start(color[0])
+    end = match.end(color[0])
+    return (line[:start]
+            + color[1] + line[start:end]
+            + colorama.Fore.RESET + colorama.Style.RESET_ALL
+            + line[end:])
+
+  def ApplyColor(line):
+    if warning_re.match(line):
+      line = Colorize(line, warning_re, warning_color)
+    elif error_re.match(line):
+      line = Colorize(line, error_re, error_color)
+    elif marker_re.match(line):
+      line = Colorize(line, marker_re, marker_color)
+    return line
+
+  return '\n'.join(map(ApplyColor, output.split('\n')))
+
+
+# Extra flags passed to the errorprone compiler when --use-errorprone-path is
+# given (see main); plain javac invocations do not receive these.
+ERRORPRONE_OPTIONS = [
+  # These crash on lots of targets.
+  '-Xep:ParameterPackage:OFF',
+  '-Xep:OverridesGuiceInjectableMethod:OFF',
+  '-Xep:OverridesJavaxInjectableMethod:OFF',
+]
+
+
+def _FilterJavaFiles(paths, filters):
+  return [f for f in paths
+          if not filters or build_utils.MatchesGlob(f, filters)]
+
+
+# Maximum line length for jar manifest files.
+# NOTE(review): not referenced anywhere in this script's visible code —
+# presumably consumed by jar/manifest-writing logic; confirm before removing.
+_MAX_MANIFEST_LINE_LEN = 72
+
+
+def _ExtractClassFiles(jar_path, dest_dir, java_files):
+  """Extracts all .class files not corresponding to |java_files|.
+
+  Args:
+    jar_path: Jar to extract from.
+    dest_dir: Directory to extract into.
+    java_files: .java source paths whose .class files must NOT be extracted.
+  """
+  # Two challenges exist here:
+  # 1. |java_files| have prefixes that are not represented in the jar paths.
+  # 2. A single .java file results in multiple .class files when it contains
+  #    nested classes.
+  # Here's an example:
+  #   source path: ../../base/android/java/src/org/chromium/Foo.java
+  #   jar paths: org/chromium/Foo.class, org/chromium/Foo$Inner.class
+  # To extract only .class files not related to the given .java files, we strip
+  # off ".class" and "$*.class" and use a substring match against java_files.
+  def extract_predicate(path):
+    if not path.endswith('.class'):
+      return False
+    path_without_suffix = re.sub(r'(?:\$|\.)[^/]+class$', '', path)
+    partial_java_path = path_without_suffix + '.java'
+    return not any(p.endswith(partial_java_path) for p in java_files)
+
+  build_utils.ExtractAll(jar_path, path=dest_dir, predicate=extract_predicate)
+  # Stamp extracted files with the jar's timestamps so they never look older
+  # than their sources (jmake decides staleness by timestamp; see _OnStaleMd5).
+  for path in build_utils.FindInDirectory(dest_dir, '*.class'):
+    shutil.copystat(jar_path, path)
+
+
+def _ConvertToJMakeArgs(javac_cmd, pdb_path):
+  new_args = ['bin/jmake', '-pdb', pdb_path]
+  if javac_cmd[0] != 'javac':
+    new_args.extend(('-jcexec', new_args[0]))
+  if md5_check.PRINT_EXPLANATIONS:
+    new_args.append('-Xtiming')
+
+  do_not_prefix = ('-classpath', '-bootclasspath')
+  skip_next = False
+  for arg in javac_cmd[1:]:
+    if not skip_next and arg not in do_not_prefix:
+      arg = '-C' + arg
+    new_args.append(arg)
+    skip_next = arg in do_not_prefix
+
+  return new_args
+
+
+def _FixTempPathsInIncrementalMetadata(pdb_path, temp_dir):
+  # The .pdb records absolute paths. Fix up paths within /tmp (srcjars).
+  if os.path.exists(pdb_path):
+    # Although its a binary file, search/replace still seems to work fine.
+    with open(pdb_path) as fileobj:
+      pdb_data = fileobj.read()
+    with open(pdb_path, 'w') as fileobj:
+      fileobj.write(re.sub(r'/tmp/[^/]*', temp_dir, pdb_data))
+
+
+def _OnStaleMd5(changes, options, javac_cmd, java_files, classpath_inputs):
+  """Compiles |java_files| (plus srcjar contents) and jars the results.
+
+  Invoked by build_utils.CallAndWriteDepfileIfStale only when inputs changed.
+
+  Args:
+    changes: Object describing which input paths changed (supports
+        AddedOrModifiedOnly/IterChangedPaths/IterChangedSubpaths).
+    options: Parsed options from _ParseOptions.
+    javac_cmd: Base compiler command line (no -d, no source files).
+    java_files: .java files to compile; extended in place with srcjar sources.
+    classpath_inputs: Classpath files whose change forces a full recompile.
+  """
+  with build_utils.TempDir() as temp_dir:
+    srcjars = options.java_srcjars
+    # The .excluded.jar contains .class files excluded from the main jar.
+    # It is used for incremental compiles.
+    excluded_jar_path = options.jar_path.replace('.jar', '.excluded.jar')
+
+    classes_dir = os.path.join(temp_dir, 'classes')
+    os.makedirs(classes_dir)
+
+    changed_paths = None
+    # jmake can handle deleted files, but it's a rare case and it would
+    # complicate this script's logic.
+    if options.incremental and changes.AddedOrModifiedOnly():
+      changed_paths = set(changes.IterChangedPaths())
+      # Do a full compile if classpath has changed.
+      # jmake doesn't seem to do this on its own... Might be that ijars mess up
+      # its change-detection logic.
+      if any(p in changed_paths for p in classpath_inputs):
+        changed_paths = None
+
+    if options.incremental:
+      # jmake is a compiler wrapper that figures out the minimal set of .java
+      # files that need to be rebuilt given a set of .java files that have
+      # changed.
+      # jmake determines what files are stale based on timestamps between .java
+      # and .class files. Since we use .jars, .srcjars, and md5 checks,
+      # timestamp info isn't accurate for this purpose. Rather than use jmake's
+      # programmatic interface (like we eventually should), we ensure that all
+      # .class files are newer than their .java files, and convey to jmake which
+      # sources are stale by having their .class files be missing entirely
+      # (by not extracting them).
+      pdb_path = options.jar_path + '.pdb'
+      javac_cmd = _ConvertToJMakeArgs(javac_cmd, pdb_path)
+      if srcjars:
+        _FixTempPathsInIncrementalMetadata(pdb_path, temp_dir)
+
+    if srcjars:
+      # Extract all srcjar sources into a temp dir and fold them into
+      # |java_files| (subject to --javac-includes filtering).
+      java_dir = os.path.join(temp_dir, 'java')
+      os.makedirs(java_dir)
+      for srcjar in options.java_srcjars:
+        if changed_paths:
+          changed_paths.update(os.path.join(java_dir, f)
+                               for f in changes.IterChangedSubpaths(srcjar))
+        build_utils.ExtractAll(srcjar, path=java_dir, pattern='*.java')
+      jar_srcs = build_utils.FindInDirectory(java_dir, '*.java')
+      jar_srcs = _FilterJavaFiles(jar_srcs, options.javac_includes)
+      java_files.extend(jar_srcs)
+      if changed_paths:
+        # Set the mtime of all sources to 0 since we use the absence of .class
+        # files to tell jmake which files are stale.
+        for path in jar_srcs:
+          os.utime(path, (0, 0))
+
+    if java_files:
+      if changed_paths:
+        changed_java_files = [p for p in java_files if p in changed_paths]
+        if os.path.exists(options.jar_path):
+          _ExtractClassFiles(options.jar_path, classes_dir, changed_java_files)
+        if os.path.exists(excluded_jar_path):
+          _ExtractClassFiles(excluded_jar_path, classes_dir, changed_java_files)
+        # Add the extracted files to the classpath. This is required because
+        # when compiling only a subset of files, classes that haven't changed
+        # need to be findable.
+        classpath_idx = javac_cmd.index('-classpath')
+        javac_cmd[classpath_idx + 1] += ':' + classes_dir
+
+      # Can happen when a target goes from having no sources, to having sources.
+      # It's created by the call to build_utils.Touch() below.
+      if options.incremental:
+        if os.path.exists(pdb_path) and not os.path.getsize(pdb_path):
+          os.unlink(pdb_path)
+
+      # Don't include the output directory in the initial set of args since it
+      # being in a temp dir makes it unstable (breaks md5 stamping).
+      cmd = javac_cmd + ['-d', classes_dir] + java_files
+
+      # JMake prints out some diagnostic logs that we want to ignore.
+      # This assumes that all compiler output goes through stderr.
+      stdout_filter = lambda s: ''
+      if md5_check.PRINT_EXPLANATIONS:
+        stdout_filter = None
+
+      attempt_build = lambda: build_utils.CheckOutput(
+          cmd,
+          print_stdout=options.chromium_code,
+          stdout_filter=stdout_filter,
+          stderr_filter=ColorJavacOutput)
+      try:
+        attempt_build()
+      except build_utils.CalledProcessError as e:
+        # Work-around for a bug in jmake (http://crbug.com/551449).
+        if 'project database corrupted' not in e.output:
+          raise
+        print ('Applying work-around for jmake project database corrupted '
+               '(http://crbug.com/551449).')
+        os.unlink(pdb_path)
+        attempt_build()
+    elif options.incremental:
+      # Make sure output exists.
+      build_utils.Touch(pdb_path)
+
+    # Split the compiled classes between the main jar and the .excluded.jar
+    # according to --jar-excluded-classes.
+    glob = options.jar_excluded_classes
+    inclusion_predicate = lambda f: not build_utils.MatchesGlob(f, glob)
+    exclusion_predicate = lambda f: not inclusion_predicate(f)
+
+    jar.JarDirectory(classes_dir,
+                     options.jar_path,
+                     predicate=inclusion_predicate)
+    jar.JarDirectory(classes_dir,
+                     excluded_jar_path,
+                     predicate=exclusion_predicate)
+
+
+def _ParseOptions(argv):
+  """Parses command-line options and expands GYP-style list arguments.
+
+  Returns:
+    A (options, args) tuple where list-valued options (bootclasspath,
+    classpath, java_srcjars, src_gendirs, javac_includes,
+    jar_excluded_classes) have been expanded into real Python lists, and
+    |args| holds the positional .java file arguments.
+  """
+  parser = optparse.OptionParser()
+  build_utils.AddDepfileOption(parser)
+
+  parser.add_option(
+      '--src-gendirs',
+      help='Directories containing generated java files.')
+  parser.add_option(
+      '--java-srcjars',
+      action='append',
+      default=[],
+      help='List of srcjars to include in compilation.')
+  parser.add_option(
+      '--bootclasspath',
+      action='append',
+      default=[],
+      help='Boot classpath for javac. If this is specified multiple times, '
+      'they will all be appended to construct the classpath.')
+  parser.add_option(
+      '--classpath',
+      action='append',
+      help='Classpath for javac. If this is specified multiple times, they '
+      'will all be appended to construct the classpath.')
+  parser.add_option(
+      '--incremental',
+      action='store_true',
+      help='Whether to re-use .class files rather than recompiling them '
+           '(when possible).')
+  parser.add_option(
+      '--javac-includes',
+      default='',
+      help='A list of file patterns. If provided, only java files that match'
+      'one of the patterns will be compiled.')
+  parser.add_option(
+      '--jar-excluded-classes',
+      default='',
+      help='List of .class file patterns to exclude from the jar.')
+  parser.add_option(
+      '--chromium-code',
+      type='int',
+      help='Whether code being compiled should be built with stricter '
+      'warnings for chromium code.')
+  parser.add_option(
+      '--use-errorprone-path',
+      help='Use the Errorprone compiler at this path.')
+  parser.add_option('--jar-path', help='Jar output path.')
+  parser.add_option('--stamp', help='Path to touch on success.')
+
+  options, args = parser.parse_args(argv)
+  build_utils.CheckOptions(options, parser, required=('jar_path',))
+
+  bootclasspath = []
+  for arg in options.bootclasspath:
+    bootclasspath += build_utils.ParseGypList(arg)
+  options.bootclasspath = bootclasspath
+
+  classpath = []
+  for arg in options.classpath:
+    classpath += build_utils.ParseGypList(arg)
+  options.classpath = classpath
+
+  java_srcjars = []
+  for arg in options.java_srcjars:
+    java_srcjars += build_utils.ParseGypList(arg)
+  options.java_srcjars = java_srcjars
+
+  if options.src_gendirs:
+    options.src_gendirs = build_utils.ParseGypList(options.src_gendirs)
+
+  options.javac_includes = build_utils.ParseGypList(options.javac_includes)
+  options.jar_excluded_classes = (
+      build_utils.ParseGypList(options.jar_excluded_classes))
+  return options, args
+
+
+def main(argv):
+  """Builds the javac (or jmake/errorprone) command line and compiles.
+
+  The actual compile is deferred to _OnStaleMd5 through
+  build_utils.CallAndWriteDepfileIfStale, so nothing is rebuilt when neither
+  the inputs nor the compiler command line changed.
+  """
+  colorama.init()
+
+  argv = build_utils.ExpandFileArgs(argv)
+  options, java_files = _ParseOptions(argv)
+
+  if options.src_gendirs:
+    java_files += build_utils.FindInDirectories(options.src_gendirs, '*.java')
+
+  java_files = _FilterJavaFiles(java_files, options.javac_includes)
+
+  javac_cmd = ['javac']
+  if options.use_errorprone_path:
+    javac_cmd = [options.use_errorprone_path] + ERRORPRONE_OPTIONS
+
+  javac_cmd.extend((
+      '-g',
+      # Chromium only allows UTF8 source files.  Being explicit avoids
+      # javac pulling a default encoding from the user's environment.
+      '-encoding', 'UTF-8',
+      '-classpath', ':'.join(options.classpath),
+      # Prevent compiler from compiling .java files not listed as inputs.
+      # See: http://blog.ltgt.net/most-build-tools-misuse-javac/
+      '-sourcepath', ''
+  ))
+
+  if options.bootclasspath:
+    javac_cmd.extend([
+        '-bootclasspath', ':'.join(options.bootclasspath),
+        '-source', '1.7',
+        '-target', '1.7',
+        ])
+
+  if options.chromium_code:
+    javac_cmd.extend(['-Xlint:unchecked', '-Xlint:deprecation'])
+  else:
+    # XDignore.symbol.file makes javac compile against rt.jar instead of
+    # ct.sym. This means that using a java internal package/class will not
+    # trigger a compile warning or error.
+    javac_cmd.extend(['-XDignore.symbol.file'])
+
+  # NOTE(review): this aliases options.bootclasspath and the extend() calls
+  # below mutate it in place; harmless here because bootclasspath is not read
+  # again afterwards, but a copy (list(...)) would be safer.
+  classpath_inputs = options.bootclasspath
+  if options.classpath:
+    if options.classpath[0].endswith('.interface.jar'):
+      classpath_inputs.extend(options.classpath)
+    else:
+      # TODO(agrieve): Remove this .TOC heuristic once GYP is no more.
+      for path in options.classpath:
+        if os.path.exists(path + '.TOC'):
+          classpath_inputs.append(path + '.TOC')
+        else:
+          classpath_inputs.append(path)
+
+  # Compute the list of paths that when changed, we need to rebuild.
+  input_paths = classpath_inputs + options.java_srcjars + java_files
+
+  output_paths = [
+      options.jar_path,
+      options.jar_path.replace('.jar', '.excluded.jar'),
+  ]
+  if options.incremental:
+    output_paths.append(options.jar_path + '.pdb')
+
+  # An escape hatch to be able to check if incremental compiles are causing
+  # problems.
+  force = int(os.environ.get('DISABLE_INCREMENTAL_JAVAC', 0))
+
+  # List python deps in input_strings rather than input_paths since the contents
+  # of them does not change what gets written to the depsfile.
+  build_utils.CallAndWriteDepfileIfStale(
+      lambda changes: _OnStaleMd5(changes, options, javac_cmd, java_files,
+                                  classpath_inputs),
+      options,
+      input_paths=input_paths,
+      input_strings=javac_cmd,
+      output_paths=output_paths,
+      force=force,
+      pass_changes=True)
+
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv[1:]))
diff --git a/build/android/gyp/jinja_template.py b/build/android/gyp/jinja_template.py
new file mode 100755
index 0000000..7e9624b
--- /dev/null
+++ b/build/android/gyp/jinja_template.py
@@ -0,0 +1,124 @@
+#!/usr/bin/env python
+#
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Renders one or more template files using the Jinja template engine."""
+
+import codecs
+import optparse
+import os
+import sys
+
+from util import build_utils
+
+sys.path.append(os.path.join(os.path.dirname(__file__), os.pardir))
+from pylib.constants import host_paths
+
+# Import jinja2 from third_party/jinja2
+sys.path.append(os.path.join(host_paths.DIR_SOURCE_ROOT, 'third_party'))
+import jinja2  # pylint: disable=F0401
+
+
+class RecordingFileSystemLoader(jinja2.FileSystemLoader):
+  '''A FileSystemLoader that stores a list of loaded templates.'''
+  def __init__(self, searchpath):
+    jinja2.FileSystemLoader.__init__(self, searchpath)
+    # Relative paths of every template read through this loader; consumed by
+    # get_loaded_templates() for depfile generation.
+    self.loaded_templates = set()
+
+  def get_source(self, environment, template):
+    '''Loads the template, recording its relative path before returning.'''
+    contents, filename, uptodate = jinja2.FileSystemLoader.get_source(
+        self, environment, template)
+    self.loaded_templates.add(os.path.relpath(filename))
+    return contents, filename, uptodate
+
+  def get_loaded_templates(self):
+    '''Returns the recorded template paths as a list.'''
+    return list(self.loaded_templates)
+
+
+def ProcessFile(env, input_filename, loader_base_dir, output_filename,
+                variables):
+  input_rel_path = os.path.relpath(input_filename, loader_base_dir)
+  template = env.get_template(input_rel_path)
+  output = template.render(variables)
+  with codecs.open(output_filename, 'w', 'utf-8') as output_file:
+    output_file.write(output)
+
+
+def ProcessFiles(env, input_filenames, loader_base_dir, inputs_base_dir,
+                 outputs_zip, variables):
+  """Renders each input template and zips the results into |outputs_zip|.
+
+  Each output's path inside the zip is the input's path relative to
+  |inputs_base_dir|; an input outside that directory raises an Exception.
+  """
+  with build_utils.TempDir() as temp_dir:
+    for input_filename in input_filenames:
+      relpath = os.path.relpath(os.path.abspath(input_filename),
+                                os.path.abspath(inputs_base_dir))
+      if relpath.startswith(os.pardir):
+        raise Exception('input file %s is not contained in inputs base dir %s'
+                        % (input_filename, inputs_base_dir))
+
+      output_filename = os.path.join(temp_dir, relpath)
+      parent_dir = os.path.dirname(output_filename)
+      build_utils.MakeDirectory(parent_dir)
+      ProcessFile(env, input_filename, loader_base_dir, output_filename,
+                  variables)
+
+    build_utils.ZipDir(outputs_zip, temp_dir)
+
+
+def main():
+  """Parses flags, renders the templates, and optionally writes a depfile.
+
+  Exactly one of --output (single input) or --outputs-zip (any number of
+  inputs, requires --inputs-base-dir) must be given.
+  """
+  parser = optparse.OptionParser()
+  build_utils.AddDepfileOption(parser)
+  parser.add_option('--inputs', help='The template files to process.')
+  parser.add_option('--output', help='The output file to generate. Valid '
+                    'only if there is a single input.')
+  parser.add_option('--outputs-zip', help='A zip file containing the processed '
+                    'templates. Required if there are multiple inputs.')
+  parser.add_option('--inputs-base-dir', help='A common ancestor directory of '
+                    'the inputs. Each output\'s path in the output zip will '
+                    'match the relative path from INPUTS_BASE_DIR to the '
+                    'input. Required if --output-zip is given.')
+  parser.add_option('--loader-base-dir', help='Base path used by the template '
+                    'loader. Must be a common ancestor directory of '
+                    'the inputs. Defaults to DIR_SOURCE_ROOT.',
+                    default=host_paths.DIR_SOURCE_ROOT)
+  parser.add_option('--variables', help='Variables to be made available in the '
+                    'template processing environment, as a GYP list (e.g. '
+                    '--variables "channel=beta mstone=39")', default='')
+  options, args = parser.parse_args()
+
+  build_utils.CheckOptions(options, parser, required=['inputs'])
+  inputs = build_utils.ParseGypList(options.inputs)
+
+  if (options.output is None) == (options.outputs_zip is None):
+    parser.error('Exactly one of --output and --output-zip must be given')
+  if options.output and len(inputs) != 1:
+    parser.error('--output cannot be used with multiple inputs')
+  if options.outputs_zip and not options.inputs_base_dir:
+    parser.error('--inputs-base-dir must be given when --output-zip is used')
+  if args:
+    parser.error('No positional arguments should be given.')
+
+  # Each --variables entry is a name=value pair exposed to the templates.
+  variables = {}
+  for v in build_utils.ParseGypList(options.variables):
+    if '=' not in v:
+      parser.error('--variables argument must contain "=": ' + v)
+    name, _, value = v.partition('=')
+    variables[name] = value
+
+  loader = RecordingFileSystemLoader(options.loader_base_dir)
+  env = jinja2.Environment(loader=loader, undefined=jinja2.StrictUndefined,
+                           line_comment_prefix='##')
+  if options.output:
+    ProcessFile(env, inputs[0], options.loader_base_dir, options.output,
+                variables)
+  else:
+    ProcessFiles(env, inputs, options.loader_base_dir, options.inputs_base_dir,
+                 options.outputs_zip, variables)
+
+  if options.depfile:
+    # The depfile lists every template actually read plus this script's own
+    # python dependencies.
+    deps = loader.get_loaded_templates() + build_utils.GetPythonDependencies()
+    build_utils.WriteDepfile(options.depfile, deps)
+
+
+if __name__ == '__main__':
+  main()
diff --git a/build/android/gyp/lint.py b/build/android/gyp/lint.py
new file mode 100755
index 0000000..2efe9f8
--- /dev/null
+++ b/build/android/gyp/lint.py
@@ -0,0 +1,321 @@
+#!/usr/bin/env python
+#
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Runs Android's lint tool."""
+
+
+import argparse
+import os
+import re
+import sys
+import traceback
+from xml.dom import minidom
+
+from util import build_utils
+
+_LINT_MD_URL = 'https://chromium.googlesource.com/chromium/src/+/master/build/android/docs/lint.md' # pylint: disable=line-too-long
+_SRC_ROOT = os.path.abspath(os.path.join(os.path.dirname(__file__),
+                                         '..', '..', '..'))
+
+
+def _OnStaleMd5(lint_path, config_path, processed_config_path,
+                manifest_path, result_path, product_dir, sources, jar_path,
+                cache_dir, android_sdk_version, resource_dir=None,
+                classpath=None, can_fail_build=False, silent=False):
+  def _RelativizePath(path):
+    """Returns relative path to top-level src dir.
+
+    Args:
+      path: A path relative to cwd.
+    """
+    return os.path.relpath(os.path.abspath(path), _SRC_ROOT)
+
+  def _ProcessConfigFile():
+    if not config_path or not processed_config_path:
+      return
+    if not build_utils.IsTimeStale(processed_config_path, [config_path]):
+      return
+
+    with open(config_path, 'rb') as f:
+      content = f.read().replace(
+          'PRODUCT_DIR', _RelativizePath(product_dir))
+
+    with open(processed_config_path, 'wb') as f:
+      f.write(content)
+
+  def _ProcessResultFile():
+    with open(result_path, 'rb') as f:
+      content = f.read().replace(
+          _RelativizePath(product_dir), 'PRODUCT_DIR')
+
+    with open(result_path, 'wb') as f:
+      f.write(content)
+
+  def _ParseAndShowResultFile():
+    dom = minidom.parse(result_path)
+    issues = dom.getElementsByTagName('issue')
+    if not silent:
+      print >> sys.stderr
+      for issue in issues:
+        issue_id = issue.attributes['id'].value
+        message = issue.attributes['message'].value
+        location_elem = issue.getElementsByTagName('location')[0]
+        path = location_elem.attributes['file'].value
+        line = location_elem.getAttribute('line')
+        if line:
+          error = '%s:%s %s: %s [warning]' % (path, line, message, issue_id)
+        else:
+          # Issues in class files don't have a line number.
+          error = '%s %s: %s [warning]' % (path, message, issue_id)
+        print >> sys.stderr, error.encode('utf-8')
+        for attr in ['errorLine1', 'errorLine2']:
+          error_line = issue.getAttribute(attr)
+          if error_line:
+            print >> sys.stderr, error_line.encode('utf-8')
+    return len(issues)
+
+  with build_utils.TempDir() as temp_dir:
+    _ProcessConfigFile()
+
+    cmd = [
+        _RelativizePath(lint_path), '-Werror', '--exitcode', '--showall',
+        '--xml', _RelativizePath(result_path),
+    ]
+    if jar_path:
+      # --classpath is just for .class files for this one target.
+      cmd.extend(['--classpath', _RelativizePath(jar_path)])
+    if processed_config_path:
+      cmd.extend(['--config', _RelativizePath(processed_config_path)])
+    if resource_dir:
+      cmd.extend(['--resources', _RelativizePath(resource_dir)])
+    if classpath:
+      # --libraries is the classpath (excluding active target).
+      cp = ':'.join(_RelativizePath(p) for p in classpath)
+      cmd.extend(['--libraries', cp])
+
+    # There may be multiple source files with the same basename (but in
+    # different directories). It is difficult to determine what part of the path
+    # corresponds to the java package, and so instead just link the source files
+    # into temporary directories (creating a new one whenever there is a name
+    # conflict).
+    src_dirs = []
+    def NewSourceDir():
+      new_dir = os.path.join(temp_dir, str(len(src_dirs)))
+      os.mkdir(new_dir)
+      src_dirs.append(new_dir)
+      return new_dir
+
+    def PathInDir(d, src):
+      return os.path.join(d, os.path.basename(src))
+
+    for src in sources:
+      src_dir = None
+      for d in src_dirs:
+        if not os.path.exists(PathInDir(d, src)):
+          src_dir = d
+          break
+      if not src_dir:
+        src_dir = NewSourceDir()
+        cmd.extend(['--sources', _RelativizePath(src_dir)])
+      os.symlink(os.path.abspath(src), PathInDir(src_dir, src))
+
+    project_dir = NewSourceDir()
+    if android_sdk_version:
+      # Create dummy project.properies file in a temporary "project" directory.
+      # It is the only way to add Android SDK to the Lint's classpath. Proper
+      # classpath is necessary for most source-level checks.
+      with open(os.path.join(project_dir, 'project.properties'), 'w') \
+          as propfile:
+        print >> propfile, 'target=android-{}'.format(android_sdk_version)
+
+    # Put the manifest in a temporary directory in order to avoid lint detecting
+    # sibling res/ and src/ directories (which should be pass explicitly if they
+    # are to be included).
+    if manifest_path:
+      os.symlink(os.path.abspath(manifest_path),
+                 PathInDir(project_dir, manifest_path))
+    cmd.append(project_dir)
+
+    if os.path.exists(result_path):
+      os.remove(result_path)
+
+    env = {}
+    stderr_filter = None
+    if cache_dir:
+      env['_JAVA_OPTIONS'] = '-Duser.home=%s' % _RelativizePath(cache_dir)
+      # When _JAVA_OPTIONS is set, java prints to stderr:
+      # Picked up _JAVA_OPTIONS: ...
+      #
+      # We drop all lines that contain _JAVA_OPTIONS from the output
+      stderr_filter = lambda l: re.sub(r'.*_JAVA_OPTIONS.*\n?', '', l)
+
+    try:
+      build_utils.CheckOutput(cmd, cwd=_SRC_ROOT, env=env or None,
+                              stderr_filter=stderr_filter)
+    except build_utils.CalledProcessError:
+      # There is a problem with lint usage
+      if not os.path.exists(result_path):
+        raise
+
+      # Sometimes produces empty (almost) files:
+      if os.path.getsize(result_path) < 10:
+        if can_fail_build:
+          raise
+        elif not silent:
+          traceback.print_exc()
+        return
+
+      # There are actual lint issues
+      try:
+        num_issues = _ParseAndShowResultFile()
+      except Exception: # pylint: disable=broad-except
+        if not silent:
+          print 'Lint created unparseable xml file...'
+          print 'File contents:'
+          with open(result_path) as f:
+            print f.read()
+        if not can_fail_build:
+          return
+
+      if can_fail_build and not silent:
+        traceback.print_exc()
+
+      # There are actual lint issues
+      try:
+        num_issues = _ParseAndShowResultFile()
+      except Exception: # pylint: disable=broad-except
+        if not silent:
+          print 'Lint created unparseable xml file...'
+          print 'File contents:'
+          with open(result_path) as f:
+            print f.read()
+        raise
+
+      _ProcessResultFile()
+      msg = ('\nLint found %d new issues.\n'
+             ' - For full explanation, please refer to %s\n'
+             ' - For more information about lint and how to fix lint issues,'
+             ' please refer to %s\n' %
+             (num_issues,
+              _RelativizePath(result_path),
+              _LINT_MD_URL))
+      if not silent:
+        print >> sys.stderr, msg
+      if can_fail_build:
+        raise Exception('Lint failed.')
+
+
+def main():
+  """Parses arguments and, when --enable is set, runs Android lint.
+
+  The lint run itself is deferred to _OnStaleMd5 through
+  build_utils.CallAndWriteDepfileIfStale, so nothing happens when neither
+  the inputs nor the relevant flag values have changed.
+  """
+  parser = argparse.ArgumentParser()
+  build_utils.AddDepfileOption(parser)
+
+  parser.add_argument('--lint-path', required=True,
+                      help='Path to lint executable.')
+  parser.add_argument('--product-dir', required=True,
+                      help='Path to product dir.')
+  parser.add_argument('--result-path', required=True,
+                      help='Path to XML lint result file.')
+  parser.add_argument('--cache-dir', required=True,
+                      help='Path to the directory in which the android cache '
+                           'directory tree should be stored.')
+  parser.add_argument('--platform-xml-path', required=True,
+                      help='Path to api-platforms.xml')
+  parser.add_argument('--android-sdk-version',
+                      help='Version (API level) of the Android SDK used for '
+                           'building.')
+  parser.add_argument('--create-cache', action='store_true',
+                      help='Mark the lint cache file as an output rather than '
+                      'an input.')
+  parser.add_argument('--can-fail-build', action='store_true',
+                      help='If set, script will exit with nonzero exit status'
+                           ' if lint errors are present')
+  parser.add_argument('--config-path',
+                      help='Path to lint suppressions file.')
+  parser.add_argument('--enable', action='store_true',
+                      help='Run lint instead of just touching stamp.')
+  parser.add_argument('--jar-path',
+                      help='Jar file containing class files.')
+  parser.add_argument('--java-files',
+                      help='Paths to java files.')
+  parser.add_argument('--manifest-path',
+                      help='Path to AndroidManifest.xml')
+  parser.add_argument('--classpath', default=[], action='append',
+                      help='GYP-list of classpath .jar files')
+  parser.add_argument('--processed-config-path',
+                      help='Path to processed lint suppressions file.')
+  parser.add_argument('--resource-dir',
+                      help='Path to resource dir.')
+  parser.add_argument('--silent', action='store_true',
+                      help='If set, script will not log anything.')
+  parser.add_argument('--src-dirs',
+                      help='Directories containing java files.')
+  parser.add_argument('--stamp',
+                      help='Path to touch on success.')
+
+  args = parser.parse_args(build_utils.ExpandFileArgs(sys.argv[1:]))
+
+  if args.enable:
+    # --src-dirs wins over --java-files when both are given.
+    sources = []
+    if args.src_dirs:
+      src_dirs = build_utils.ParseGypList(args.src_dirs)
+      sources = build_utils.FindInDirectories(src_dirs, '*.java')
+    elif args.java_files:
+      sources = build_utils.ParseGypList(args.java_files)
+
+    if args.config_path and not args.processed_config_path:
+      parser.error('--config-path specified without --processed-config-path')
+    elif args.processed_config_path and not args.config_path:
+      parser.error('--processed-config-path specified without --config-path')
+
+    # NOTE(review): --platform-xml-path is only listed as an input dependency
+    # here; it is not passed to _OnStaleMd5 — confirm this is intentional.
+    input_paths = [
+        args.lint_path,
+        args.platform_xml_path,
+    ]
+    if args.config_path:
+      input_paths.append(args.config_path)
+    if args.jar_path:
+      input_paths.append(args.jar_path)
+    if args.manifest_path:
+      input_paths.append(args.manifest_path)
+    if args.resource_dir:
+      input_paths.extend(build_utils.FindInDirectory(args.resource_dir, '*'))
+    if sources:
+      input_paths.extend(sources)
+    classpath = []
+    for gyp_list in args.classpath:
+      classpath.extend(build_utils.ParseGypList(gyp_list))
+    input_paths.extend(classpath)
+
+    input_strings = []
+    if args.android_sdk_version:
+      input_strings.append(args.android_sdk_version)
+    if args.processed_config_path:
+      input_strings.append(args.processed_config_path)
+
+    output_paths = [ args.result_path ]
+
+    build_utils.CallAndWriteDepfileIfStale(
+        lambda: _OnStaleMd5(args.lint_path,
+                            args.config_path,
+                            args.processed_config_path,
+                            args.manifest_path, args.result_path,
+                            args.product_dir, sources,
+                            args.jar_path,
+                            args.cache_dir,
+                            args.android_sdk_version,
+                            resource_dir=args.resource_dir,
+                            classpath=classpath,
+                            can_fail_build=args.can_fail_build,
+                            silent=args.silent),
+        args,
+        input_paths=input_paths,
+        input_strings=input_strings,
+        output_paths=output_paths,
+        depfile_deps=classpath)
+
+
+if __name__ == '__main__':
+  sys.exit(main())
diff --git a/build/android/gyp/locale_pak_resources.py b/build/android/gyp/locale_pak_resources.py
new file mode 100755
index 0000000..84c4a37
--- /dev/null
+++ b/build/android/gyp/locale_pak_resources.py
@@ -0,0 +1,126 @@
+#!/usr/bin/env python
+#
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Creates a resources.zip for locale .pak files.
+
+Places the locale.pak files into appropriate resource configs
+(e.g. en-GB.pak -> res/raw-en/en_gb.lpak). Also generates a locale_paks
+TypedArray so that resource files can be enumerated at runtime.
+"""
+
+import collections
+import optparse
+import os
+import sys
+import zipfile
+
+from util import build_utils
+
+
# This should stay in sync with:
# base/android/java/src/org/chromium/base/LocaleUtils.java
# Chrome locale code -> Android locale code, for the languages where the two
# differ (Android resource qualifiers use legacy codes for these languages).
_CHROME_TO_ANDROID_LOCALE_MAP = {
    'he': 'iw',
    'id': 'in',
    'fil': 'tl',
}
+
+
def ToResourceFileName(name):
  """Returns the resource-compatible file name for the given file."""
  # Android resource file names may only contain [a-z0-9_.].
  # The .lpak extension (instead of .pak) lets compression be configured
  # separately for locale pak files vs other pak files.
  # Note: the '.pak' replacement runs before lower(), so an upper-case
  # extension is intentionally left un-renamed (matching original behavior).
  underscored = name.replace('-', '_')
  relabeled = underscored.replace('.pak', '.lpak')
  return relabeled.lower()
+
+
def CreateLocalePaksXml(names):
  """Builds the body of a locale-paks.xml values file.

  The generated <array name="locale_paks"> lists one @raw item per given
  resource name (extension stripped), in input order.
  """
  VALUES_FILE_TEMPLATE = '''<?xml version="1.0" encoding="utf-8"?>
<resources>
  <array name="locale_paks">%s
  </array>
</resources>
'''
  VALUES_ITEM_TEMPLATE = '''
    <item>@raw/%s</item>'''

  items = []
  for name in names:
    resource_name = os.path.splitext(name)[0]
    items.append(VALUES_ITEM_TEMPLATE % resource_name)
  return VALUES_FILE_TEMPLATE % ''.join(items)
+
+
def ComputeMappings(sources):
  """Computes the mappings of sources -> resources.

  Returns a tuple of:
    - mappings: List of (src, dest) paths
    - lang_to_locale_map: Map of language -> list of resource names
      e.g. "en" -> ["en_gb.lpak"]
  """
  lang_to_locale_map = collections.defaultdict(list)
  mappings = []
  for src_path in sources:
    pak_name = os.path.basename(src_path)
    locale = os.path.splitext(pak_name)[0]
    res_name = ToResourceFileName(pak_name)
    if locale == 'en-US':
      # en-US is the fallback and lives in the default res/raw directory.
      dest_dir = 'raw'
    else:
      # Chrome's uses different region mapping logic from Android, so include
      # all regions for each language.
      android_locale = _CHROME_TO_ANDROID_LOCALE_MAP.get(locale, locale)
      lang = android_locale[:2]
      dest_dir = 'raw-' + lang
      lang_to_locale_map[lang].append(res_name)
    mappings.append((src_path, os.path.join(dest_dir, res_name)))
  return mappings, lang_to_locale_map
+
+
def main():
  """Entry point: maps locale .pak files into a resources.zip.

  Depending on flags, prints the Android language codes covered by the
  given paks and/or writes --resources-zip containing the renamed .lpak
  files plus generated locale-paks.xml TypedArray resources.
  """
  parser = optparse.OptionParser()
  build_utils.AddDepfileOption(parser)
  parser.add_option('--locale-paks', help='List of files for res/raw-LOCALE')
  parser.add_option('--resources-zip', help='Path to output resources.zip')
  parser.add_option('--print-languages',
      action='store_true',
      help='Print out the list of languages that cover the given locale paks '
           '(using Android\'s language codes)')

  options, _ = parser.parse_args()
  build_utils.CheckOptions(options, parser,
                           required=['locale_paks'])

  sources = build_utils.ParseGypList(options.locale_paks)

  if options.depfile:
    deps = sources + build_utils.GetPythonDependencies()
    build_utils.WriteDepfile(options.depfile, deps)

  mappings, lang_to_locale_map = ComputeMappings(sources)
  if options.print_languages:
    # One Android language code per line; sorted for stable output.
    print '\n'.join(sorted(lang_to_locale_map))

  if options.resources_zip:
    with zipfile.ZipFile(options.resources_zip, 'w', zipfile.ZIP_STORED) as out:
      for mapping in mappings:
        out.write(mapping[0], mapping[1])

      # Create TypedArray resources so ResourceExtractor can enumerate files.
      def WriteValuesFile(lang, names):
        # Writes values[-lang]/locale-paks.xml listing `names` (closure over
        # the open zip file `out`).
        dest_dir = 'values'
        if lang:
          dest_dir += '-' + lang
        # Always extract en-US.lpak since it's the fallback.
        xml = CreateLocalePaksXml(names + ['en_us.lpak'])
        out.writestr(os.path.join(dest_dir, 'locale-paks.xml'), xml)

      for lang, names in lang_to_locale_map.iteritems():
        WriteValuesFile(lang, names)
      # The default values/ config lists only the en-US fallback.
      WriteValuesFile(None, [])
diff --git a/build/android/gyp/main_dex_list.py b/build/android/gyp/main_dex_list.py
new file mode 100755
index 0000000..7388f4a
--- /dev/null
+++ b/build/android/gyp/main_dex_list.py
@@ -0,0 +1,138 @@
+#!/usr/bin/env python
+#
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import argparse
+import json
+import os
+import sys
+import tempfile
+
+from util import build_utils
+
+sys.path.append(os.path.abspath(os.path.join(
+    os.path.dirname(__file__), os.pardir)))
+from pylib import constants
+
+
def main(args):
  """Writes the main dex list for a multidex build.

  Runs a proguard shrink pass against the main-dex rules, then dx's
  MainDexListBuilder, via _OnStaleMd5 when inputs changed.

  Returns:
    0 on success. When the multidex configuration is disabled this returns
    0 immediately and --main-dex-list-path is not created.
  """
  parser = argparse.ArgumentParser()
  build_utils.AddDepfileOption(parser)
  parser.add_argument('--android-sdk-tools', required=True,
                      help='Android sdk build tools directory.')
  parser.add_argument('--main-dex-rules-path', action='append', default=[],
                      dest='main_dex_rules_paths',
                      help='A file containing a list of proguard rules to use '
                           'in determining the class to include in the '
                           'main dex.')
  parser.add_argument('--main-dex-list-path', required=True,
                      help='The main dex list file to generate.')
  parser.add_argument('--enabled-configurations',
                      help='The build configurations for which a main dex list'
                           ' should be generated.')
  parser.add_argument('--configuration-name',
                      help='The current build configuration.')
  parser.add_argument('--multidex-configuration-path',
                      help='A JSON file containing multidex build '
                           'configuration.')
  parser.add_argument('--inputs',
                      help='JARs for which a main dex list should be '
                           'generated.')
  parser.add_argument('paths', nargs='*', default=[],
                      help='JARs for which a main dex list should be '
                           'generated.')

  args = parser.parse_args(build_utils.ExpandFileArgs(args))

  if args.multidex_configuration_path:
    with open(args.multidex_configuration_path) as multidex_config_file:
      multidex_config = json.loads(multidex_config_file.read())

    # Multidex disabled for this configuration: nothing to generate.
    if not multidex_config.get('enabled', False):
      return 0

  if args.inputs:
    args.paths.extend(build_utils.ParseGypList(args.inputs))

  shrinked_android_jar = os.path.abspath(
      os.path.join(args.android_sdk_tools, 'lib', 'shrinkedAndroid.jar'))
  dx_jar = os.path.abspath(
      os.path.join(args.android_sdk_tools, 'lib', 'dx.jar'))
  rules_file = os.path.abspath(
      os.path.join(args.android_sdk_tools, 'mainDexClasses.rules'))

  # Proguard pass: shrink the inputs down to what the main-dex rules match
  # before handing them to MainDexListBuilder (see _OnStaleMd5).
  proguard_cmd = [
    constants.PROGUARD_SCRIPT_PATH,
    '-forceprocessing',
    '-dontwarn', '-dontoptimize', '-dontobfuscate', '-dontpreverify',
    '-libraryjars', shrinked_android_jar,
    '-include', rules_file,
  ]
  for m in args.main_dex_rules_paths:
    proguard_cmd.extend(['-include', m])

  main_dex_list_cmd = [
    'java', '-cp', dx_jar,
    'com.android.multidex.MainDexListBuilder',
  ]

  input_paths = list(args.paths)
  input_paths += [
    shrinked_android_jar,
    dx_jar,
    rules_file,
  ]
  input_paths += args.main_dex_rules_paths

  # The command lines themselves participate in the staleness check.
  input_strings = [
    proguard_cmd,
    main_dex_list_cmd,
  ]

  output_paths = [
    args.main_dex_list_path,
  ]

  build_utils.CallAndWriteDepfileIfStale(
      lambda: _OnStaleMd5(proguard_cmd, main_dex_list_cmd, args.paths,
                          args.main_dex_list_path),
      args,
      input_paths=input_paths,
      input_strings=input_strings,
      output_paths=output_paths)

  return 0
+
+
def _OnStaleMd5(proguard_cmd, main_dex_list_cmd, paths, main_dex_list_path):
  """Computes the main dex list and writes it to main_dex_list_path.

  Args:
    proguard_cmd: Base proguard command line; extended with -injars/-outjars.
    main_dex_list_cmd: Base MainDexListBuilder command line; extended with
      the shrunk jar and the full input classpath.
    paths: Input jar paths.
    main_dex_list_path: Output file; written even when the list is empty.
  """
  paths_arg = ':'.join(paths)
  main_dex_list = ''
  try:
    with tempfile.NamedTemporaryFile(suffix='.jar') as temp_jar:
      # NOTE(review): += mutates the caller's command lists in place;
      # harmless today since the caller discards them afterwards.
      proguard_cmd += [
        '-injars', paths_arg,
        '-outjars', temp_jar.name
      ]
      build_utils.CheckOutput(proguard_cmd, print_stderr=False)

      # MainDexListBuilder arguments: <shrunk jar> <input classpath>.
      main_dex_list_cmd += [
        temp_jar.name, paths_arg
      ]
      main_dex_list = build_utils.CheckOutput(main_dex_list_cmd)
  except build_utils.CalledProcessError as e:
    # An empty shrunk jar / classless input means nothing needs to go in the
    # main dex; tolerate it and write an empty list instead of failing.
    if 'output jar is empty' in e.output:
      pass
    elif "input doesn't contain any classes" in e.output:
      pass
    else:
      raise

  with open(main_dex_list_path, 'w') as main_dex_list_file:
    main_dex_list_file.write(main_dex_list)
+
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv[1:]))
+
diff --git a/build/android/gyp/pack_relocations.py b/build/android/gyp/pack_relocations.py
new file mode 100755
index 0000000..1a4824a
--- /dev/null
+++ b/build/android/gyp/pack_relocations.py
@@ -0,0 +1,114 @@
+#!/usr/bin/env python
+#
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Pack relocations in a library (or copy unchanged).
+
+If --enable-packing and --configuration-name=='Release', invoke the
+relocation_packer tool to pack the .rel.dyn or .rela.dyn section in the given
+library files.  This step is inserted after the libraries are stripped.
+
+If --enable-packing is zero, the script copies files verbatim, with no
+attempt to pack relocations.
+
+Any library listed in --exclude-packing-list is also copied verbatim,
+irrespective of any --enable-packing setting.  Typically this would be
+'libchromium_android_linker.so'.
+"""
+
+import optparse
+import os
+import shutil
+import sys
+import tempfile
+
+from util import build_utils
+
def PackLibraryRelocations(android_pack_relocations, library_path, output_path):
  """Copies library_path to output_path and packs its relocations in place.

  Args:
    android_pack_relocations: Path to the relocation_packer binary.
    library_path: Stripped input library.
    output_path: Destination file; rewritten in place by the packer.
  """
  shutil.copy(library_path, output_path)
  pack_command = [android_pack_relocations, output_path]
  build_utils.CheckOutput(pack_command)
+
+
def CopyLibraryUnchanged(library_path, output_path):
  """Copies the library to output_path verbatim (no relocation packing)."""
  shutil.copy(library_path, output_path)
+
+
def main(args):
  """Entry point: packs (or copies) stripped libraries into the output dir.

  Returns:
    0 on success.
  """
  args = build_utils.ExpandFileArgs(args)
  parser = optparse.OptionParser()
  build_utils.AddDepfileOption(parser)
  parser.add_option('--clear-dir', action='store_true',
                    help='If set, the destination directory will be deleted '
                    'before copying files to it. This is highly recommended to '
                    'ensure that no stale files are left in the directory.')

  parser.add_option('--configuration-name',
      default='Release',
      help='Gyp configuration name (i.e. Debug, Release)')
  parser.add_option('--enable-packing',
      choices=['0', '1'],
      help=('Pack relocations if 1 and configuration name is \'Release\','
            ' otherwise plain file copy'))
  parser.add_option('--exclude-packing-list',
      default='',
      help='Names of any libraries explicitly not packed')
  parser.add_option('--android-pack-relocations',
      help='Path to the relocations packer binary')
  parser.add_option('--stripped-libraries-dir',
      help='Directory for stripped libraries')
  parser.add_option('--packed-libraries-dir',
      help='Directory for packed libraries')
  parser.add_option('--libraries', action='append',
      help='List of libraries')
  parser.add_option('--stamp', help='Path to touch on success')
  parser.add_option('--filelistjson',
                    help='Output path of filelist.json to write')

  options, _ = parser.parse_args(args)
  # Packing happens only when explicitly enabled AND this is a Release build.
  enable_packing = (options.enable_packing == '1' and
                    options.configuration_name == 'Release')
  exclude_packing_set = set(build_utils.ParseGypList(
      options.exclude_packing_list))

  libraries = []
  for libs_arg in options.libraries:
    libraries += build_utils.ParseGypList(libs_arg)

  if options.clear_dir:
    build_utils.DeleteDirectory(options.packed_libraries_dir)

  build_utils.MakeDirectory(options.packed_libraries_dir)

  output_paths = []
  for library in libraries:
    library_path = os.path.join(options.stripped_libraries_dir, library)
    output_path = os.path.join(
        options.packed_libraries_dir, os.path.basename(library))
    output_paths.append(output_path)

    # Excluded libraries (e.g. the android linker itself, per the module
    # docstring) are always copied verbatim.
    if enable_packing and library not in exclude_packing_set:
      PackLibraryRelocations(options.android_pack_relocations,
                             library_path,
                             output_path)
    else:
      CopyLibraryUnchanged(library_path, output_path)

  if options.filelistjson:
    build_utils.WriteJson({ 'files': output_paths }, options.filelistjson)

  if options.depfile:
    build_utils.WriteDepfile(
        options.depfile,
        libraries + build_utils.GetPythonDependencies())

  if options.stamp:
    build_utils.Touch(options.stamp)

  return 0
diff --git a/build/android/gyp/package_resources.py b/build/android/gyp/package_resources.py
new file mode 100755
index 0000000..08a2537
--- /dev/null
+++ b/build/android/gyp/package_resources.py
@@ -0,0 +1,325 @@
+#!/usr/bin/env python
+#
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# pylint: disable=C0301
+"""Package resources into an apk.
+
+See https://android.googlesource.com/platform/tools/base/+/master/legacy/ant-tasks/src/main/java/com/android/ant/AaptExecTask.java
+and
+https://android.googlesource.com/platform/sdk/+/master/files/ant/build.xml
+"""
+# pylint: enable=C0301
+
+import optparse
+import os
+import re
+import shutil
+import sys
+import zipfile
+
+from util import build_utils
+
+
# List is generated from the chrome_apk.apk_intermediates.ap_ via:
#     unzip -l $FILE_AP_ | cut -c31- | grep res/draw | cut -d'/' -f 2 | sort \
#     | uniq | grep -- -tvdpi- | cut -c10-
# and then manually sorted.
# Note that we can't just do a cross-product of dimentions because the filenames
# become too big and aapt fails to create the files.
# This leaves all default drawables (mdpi) in the main apk. Android gets upset
# though if any drawables are missing from the default drawables/ directory.
# Maps short density names (used to rename the split apks, see
# RenameDensitySplits) to the aapt --split config lists (see
# _ConstructMostAaptArgs).
DENSITY_SPLITS = {
    'hdpi': (
        'hdpi-v4', # Order matters for output file names.
        'ldrtl-hdpi-v4',
        'sw600dp-hdpi-v13',
        'ldrtl-hdpi-v17',
        'ldrtl-sw600dp-hdpi-v17',
        'hdpi-v21',
    ),
    'xhdpi': (
        'xhdpi-v4',
        'ldrtl-xhdpi-v4',
        'sw600dp-xhdpi-v13',
        'ldrtl-xhdpi-v17',
        'ldrtl-sw600dp-xhdpi-v17',
        'xhdpi-v21',
    ),
    'xxhdpi': (
        'xxhdpi-v4',
        'ldrtl-xxhdpi-v4',
        'sw600dp-xxhdpi-v13',
        'ldrtl-xxhdpi-v17',
        'ldrtl-sw600dp-xxhdpi-v17',
        'xxhdpi-v21',
    ),
    'xxxhdpi': (
        'xxxhdpi-v4',
        'ldrtl-xxxhdpi-v4',
        'sw600dp-xxxhdpi-v13',
        'ldrtl-xxxhdpi-v17',
        'ldrtl-sw600dp-xxxhdpi-v17',
        'xxxhdpi-v21',
    ),
    'tvdpi': (
        'tvdpi-v4',
        'sw600dp-tvdpi-v13',
        'ldrtl-sw600dp-tvdpi-v17',
    ),
}
+
+
def _ParseArgs(args):
  """Parses command line options.

  Args:
    args: Command-line argument list (typically after
      build_utils.ExpandFileArgs).

  Returns:
    An options object as from optparse.OptionsParser.parse_args(), with
    resource_zips and language_splits already parsed into lists.
  """
  parser = optparse.OptionParser()
  build_utils.AddDepfileOption(parser)
  parser.add_option('--android-sdk-jar',
                    help='path to the Android SDK jar.')
  parser.add_option('--aapt-path',
                    help='path to the Android aapt tool')

  parser.add_option('--configuration-name',
                    help='Gyp\'s configuration name (Debug or Release).')

  parser.add_option('--android-manifest', help='AndroidManifest.xml path')
  parser.add_option('--version-code', help='Version code for apk.')
  parser.add_option('--version-name', help='Version name for apk.')
  parser.add_option(
      '--shared-resources',
      action='store_true',
      help='Make a resource package that can be loaded by a different'
      'application at runtime to access the package\'s resources.')
  parser.add_option(
      '--app-as-shared-lib',
      action='store_true',
      help='Make a resource package that can be loaded as shared library')
  parser.add_option('--resource-zips',
                    default='[]',
                    help='zip files containing resources to be packaged')
  parser.add_option('--asset-dir',
                    help='directories containing assets to be packaged')
  parser.add_option('--no-compress', help='disables compression for the '
                    'given comma separated list of extensions')
  parser.add_option(
      '--create-density-splits',
      action='store_true',
      help='Enables density splits')
  parser.add_option('--language-splits',
                    default='[]',
                    help='GYP list of languages to create splits for')

  parser.add_option('--apk-path',
                    help='Path to output (partial) apk.')

  options, positional_args = parser.parse_args(args)

  if positional_args:
    parser.error('No positional arguments should be given.')

  # Check that required options have been provided.
  required_options = ('android_sdk_jar', 'aapt_path', 'configuration_name',
                      'android_manifest', 'version_code', 'version_name',
                      'apk_path')

  build_utils.CheckOptions(options, parser, required=required_options)

  # GYP-list-valued flags arrive as strings; convert them once here.
  options.resource_zips = build_utils.ParseGypList(options.resource_zips)
  options.language_splits = build_utils.ParseGypList(options.language_splits)
  return options
+
+
def MoveImagesToNonMdpiFolders(res_root):
  """Move images from drawable-*-mdpi-* folders to drawable-* folders.

  Why? http://crbug.com/289843

  Only .png files are moved; other resources keep their mdpi qualifier.
  """
  for src_dir_name in os.listdir(res_root):
    src_components = src_dir_name.split('-')
    # Only drawable dirs carrying an explicit mdpi qualifier are affected.
    if src_components[0] != 'drawable' or 'mdpi' not in src_components:
      continue
    src_dir = os.path.join(res_root, src_dir_name)
    if not os.path.isdir(src_dir):
      continue
    # Destination dir name is the source name with the mdpi qualifier dropped.
    dst_components = [c for c in src_components if c != 'mdpi']
    assert dst_components != src_components
    dst_dir_name = '-'.join(dst_components)
    dst_dir = os.path.join(res_root, dst_dir_name)
    build_utils.MakeDirectory(dst_dir)
    for src_file_name in os.listdir(src_dir):
      if not src_file_name.endswith('.png'):
        continue
      src_file = os.path.join(src_dir, src_file_name)
      dst_file = os.path.join(dst_dir, src_file_name)
      # A collision would silently drop a variant; fail loudly instead.
      assert not os.path.lexists(dst_file)
      shutil.move(src_file, dst_file)
+
+
def PackageArgsForExtractedZip(d):
  """Returns the aapt args for an extracted resources zip.

  A resources zip either contains the resources for a single target or for
  multiple targets. If it is multiple targets merged into one, the actual
  resource directories will be contained in the subdirectories 0, 1, 2, ...

  Args:
    d: Directory the resources zip was extracted into.

  Returns:
    A list of ['-S', <res_dir>] aapt arguments, one pair per resource dir.
  """
  subdirs = [os.path.join(d, s) for s in os.listdir(d)]
  subdirs = [s for s in subdirs if os.path.isdir(s)]
  is_multi = '0' in [os.path.basename(s) for s in subdirs]
  if is_multi:
    # Preserve the merge order encoded in the numeric directory names.
    res_dirs = sorted(subdirs, key=lambda p : int(os.path.basename(p)))
  else:
    res_dirs = [d]
  package_command = []
  # Fix: the original loop rebound the parameter `d` (`for d in res_dirs`),
  # shadowing the extraction root; use a distinct loop variable instead.
  for res_dir in res_dirs:
    MoveImagesToNonMdpiFolders(res_dir)
    package_command += ['-S', res_dir]
  return package_command
+
+
def _GenerateDensitySplitPaths(apk_path):
  """Yields (aapt_output_path, short_path) for every density split.

  aapt names each split apk after its full config list; RenameDensitySplits
  later moves it to the short density-named path.
  """
  for density, config in DENSITY_SPLITS.iteritems():
    joined_config = '_'.join(config)
    yield apk_path + '_' + joined_config, apk_path + '_' + density
+
+
+def _GenerateLanguageSplitOutputPaths(apk_path, languages):
+  for lang in languages:
+    yield '%s_%s' % (apk_path, lang)
+
+
def RenameDensitySplits(apk_path):
  """Renames aapt's density split outputs to short, predictable names."""
  for aapt_name, short_name in _GenerateDensitySplitPaths(apk_path):
    shutil.move(aapt_name, short_name)
+
+
def CheckForMissedConfigs(apk_path, check_density, languages):
  """Raises an Exception if apk_path contains a config that belongs in a split.

  Args:
    apk_path: The main apk to scan.
    check_density: Whether density configs should have been split out.
    languages: Languages whose configs should have been split out.
  """
  triggers = []
  if check_density:
    for density in DENSITY_SPLITS:
      triggers.append(re.compile('-%s' % density))
  if languages:
    for lang in languages:
      triggers.append(re.compile(r'-%s\b' % lang))
  with zipfile.ZipFile(apk_path) as main_apk_zip:
    for name in main_apk_zip.namelist():
      # mipmap resources deliberately stay in the base apk.
      if 'mipmap-' in name:
        continue
      for trigger in triggers:
        if trigger.search(name):
          raise Exception(('Found config in main apk that should have been ' +
                           'put into a split: %s\nYou need to update ' +
                           'package_resources.py to include this new ' +
                           'config (trigger=%s)') % (name, trigger.pattern))
+
+
def _ConstructMostAaptArgs(options):
  """Builds the aapt 'package' command line, minus the -S resource dirs.

  The -S arguments are appended later, after the dependency resource zips
  have been extracted (see _OnStaleMd5 / PackageArgsForExtractedZip).
  """
  package_command = [
      options.aapt_path,
      'package',
      '--version-code', options.version_code,
      '--version-name', options.version_name,
      '-M', options.android_manifest,
      '--no-crunch',
      '-f',
      '--auto-add-overlay',
      '--no-version-vectors',
      '-I', options.android_sdk_jar,
      '-F', options.apk_path,
      '--ignore-assets', build_utils.AAPT_IGNORE_PATTERN,
  ]

  if options.no_compress:
    for ext in options.no_compress.split(','):
      # -0 <ext> disables compression for files with that extension.
      package_command += ['-0', ext]

  if options.shared_resources:
    package_command.append('--shared-lib')

  if options.app_as_shared_lib:
    package_command.append('--app-as-shared-lib')

  if options.asset_dir and os.path.exists(options.asset_dir):
    package_command += ['-A', options.asset_dir]

  # Each --split makes aapt emit an additional apk for those configs.
  if options.create_density_splits:
    for config in DENSITY_SPLITS.itervalues():
      package_command.extend(('--split', ','.join(config)))

  if options.language_splits:
    for lang in options.language_splits:
      package_command.extend(('--split', lang))

  if 'Debug' in options.configuration_name:
    package_command += ['--debug-mode']

  return package_command
+
+
def _OnStaleMd5(package_command, options):
  """Runs aapt and post-processes split apks; called only when inputs changed.

  Args:
    package_command: aapt command from _ConstructMostAaptArgs; extended here
      with the -S args for each extracted dependency resource zip.
    options: Parsed options from _ParseArgs.
  """
  with build_utils.TempDir() as temp_dir:
    if options.resource_zips:
      dep_zips = options.resource_zips
      for z in dep_zips:
        subdir = os.path.join(temp_dir, os.path.basename(z))
        if os.path.exists(subdir):
          raise Exception('Resource zip name conflict: ' + os.path.basename(z))
        build_utils.ExtractAll(z, path=subdir)
        # Each extracted zip contributes its own -S resource dir(s).
        package_command += PackageArgsForExtractedZip(subdir)

    build_utils.CheckOutput(
        package_command, print_stdout=False, print_stderr=False)

    # Guard against configs that should have gone into a split apk.
    if options.create_density_splits or options.language_splits:
      CheckForMissedConfigs(options.apk_path, options.create_density_splits,
                            options.language_splits)

    if options.create_density_splits:
      RenameDensitySplits(options.apk_path)
+
+
def main(args):
  """Entry point: packages resources into (partial) apk(s) via aapt."""
  args = build_utils.ExpandFileArgs(args)
  options = _ParseArgs(args)

  package_command = _ConstructMostAaptArgs(options)

  output_paths = [ options.apk_path ]

  # Each density/language split produces an additional output apk.
  if options.create_density_splits:
    for _, dst_path in _GenerateDensitySplitPaths(options.apk_path):
      output_paths.append(dst_path)
  output_paths.extend(
      _GenerateLanguageSplitOutputPaths(options.apk_path,
                                        options.language_splits))

  input_paths = [ options.android_manifest ] + options.resource_zips

  input_strings = []
  input_strings.extend(package_command)

  # The md5_check.py doesn't count file path in md5 intentionally,
  # in order to repackage resources when assets' name changed, we need
  # to put assets into input_strings, as we know the assets path isn't
  # changed among each build if there is no asset change.
  if options.asset_dir and os.path.exists(options.asset_dir):
    asset_paths = []
    for root, _, filenames in os.walk(options.asset_dir):
      asset_paths.extend(os.path.join(root, f) for f in filenames)
    input_paths.extend(asset_paths)
    # Sorted so that os.walk ordering differences don't look like changes.
    input_strings.extend(sorted(asset_paths))

  build_utils.CallAndWriteDepfileIfStale(
      lambda: _OnStaleMd5(package_command, options),
      options,
      input_paths=input_paths,
      input_strings=input_strings,
      output_paths=output_paths)
+
+
+if __name__ == '__main__':
+  main(sys.argv[1:])
diff --git a/build/android/gyp/process_resources.py b/build/android/gyp/process_resources.py
new file mode 100755
index 0000000..f8971aa
--- /dev/null
+++ b/build/android/gyp/process_resources.py
@@ -0,0 +1,503 @@
+#!/usr/bin/env python
+#
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Process Android resources to generate R.java, and prepare for packaging.
+
+This will crunch images and generate v14 compatible resources
+(see generate_v14_compatible_resources.py).
+"""
+
+import codecs
+import collections
+import optparse
+import os
+import re
+import shutil
+import sys
+
+import generate_v14_compatible_resources
+
+from util import build_utils
+
+# Import jinja2 from third_party/jinja2
+sys.path.insert(1,
+    os.path.join(os.path.dirname(__file__), '../../../third_party'))
+from jinja2 import Template # pylint: disable=F0401
+
+
# Represents a line from a R.txt file, e.g.
# 'int drawable icon 0x7f020000' -> ('int', 'drawable', 'icon', '0x7f020000').
# See _ParseTextSymbolsFile.
TextSymbolsEntry = collections.namedtuple('RTextEntry',
    ('java_type', 'resource_type', 'name', 'value'))
+
+
def _ParseArgs(args):
  """Parses command line options.

  Returns:
    An options object as from optparse.OptionsParser.parse_args(), with
    GYP-list-valued options already converted to Python lists.

  Raises:
    Exception: If neither or both of --R-dir and --srcjar-out are given.
  """
  parser = optparse.OptionParser()
  build_utils.AddDepfileOption(parser)

  parser.add_option('--android-sdk-jar',
                    help='the path to android jar file.')
  parser.add_option('--aapt-path',
                    help='path to the Android aapt tool')
  parser.add_option('--non-constant-id', action='store_true')

  parser.add_option('--android-manifest', help='AndroidManifest.xml path')
  parser.add_option('--custom-package', help='Java package for R.java')
  parser.add_option(
      '--shared-resources',
      action='store_true',
      help='Make a resource package that can be loaded by a different'
      'application at runtime to access the package\'s resources.')
  parser.add_option(
      '--app-as-shared-lib',
      action='store_true',
      help='Make a resource package that can be loaded as shared library.')

  parser.add_option('--resource-dirs',
                    help='Directories containing resources of this target.')
  parser.add_option('--dependencies-res-zips',
                    help='Resources from dependents.')

  parser.add_option('--resource-zip-out',
                    help='Path for output zipped resources.')

  parser.add_option('--R-dir',
                    help='directory to hold generated R.java.')
  parser.add_option('--srcjar-out',
                    help='Path to srcjar to contain generated R.java.')
  parser.add_option('--r-text-out',
                    help='Path to store the R.txt file generated by appt.')

  parser.add_option('--proguard-file',
                    help='Path to proguard.txt generated file')

  parser.add_option(
      '--v14-skip',
      action="store_true",
      help='Do not generate nor verify v14 resources')

  parser.add_option(
      '--extra-res-packages',
      help='Additional package names to generate R.java files for')
  parser.add_option(
      '--extra-r-text-files',
      help='For each additional package, the R.txt file should contain a '
      'list of resources to be included in the R.java file in the format '
      'generated by aapt')
  parser.add_option(
      '--include-all-resources',
      action='store_true',
      help='Include every resource ID in every generated R.java file '
      '(ignoring R.txt).')

  parser.add_option(
      '--all-resources-zip-out',
      help='Path for output of all resources. This includes resources in '
      'dependencies.')

  parser.add_option('--stamp', help='File to touch on success')

  options, positional_args = parser.parse_args(args)

  if positional_args:
    parser.error('No positional arguments should be given.')

  # Check that required options have been provided.
  required_options = (
      'android_sdk_jar',
      'aapt_path',
      'android_manifest',
      'dependencies_res_zips',
      'resource_dirs',
      'resource_zip_out',
      )
  build_utils.CheckOptions(options, parser, required=required_options)

  # R.java goes either to a directory or into a srcjar, never both.
  if (options.R_dir is None) == (options.srcjar_out is None):
    raise Exception('Exactly one of --R-dir or --srcjar-out must be specified.')

  options.resource_dirs = build_utils.ParseGypList(options.resource_dirs)
  options.dependencies_res_zips = (
      build_utils.ParseGypList(options.dependencies_res_zips))

  # Don't use [] as default value since some script explicitly pass "".
  if options.extra_res_packages:
    options.extra_res_packages = (
        build_utils.ParseGypList(options.extra_res_packages))
  else:
    options.extra_res_packages = []

  if options.extra_r_text_files:
    options.extra_r_text_files = (
        build_utils.ParseGypList(options.extra_r_text_files))
  else:
    options.extra_r_text_files = []

  return options
+
+
def CreateExtraRJavaFiles(
      r_dir, extra_packages, extra_r_text_files, shared_resources, include_all):
  """Generates an R.java file under r_dir for each extra package.

  Args:
    r_dir: Directory holding the aapt-generated R.java and R.txt.
    extra_packages: Java package names to generate extra R.java files for.
    extra_r_text_files: One R.txt path per extra package (ignored when
      include_all is set).
    shared_resources: Whether to emit non-final resource fields.
    include_all: If set, copy the full R.java into every package (only the
      package declaration is rewritten) instead of filtering per R.txt.
  """
  if include_all:
    java_files = build_utils.FindInDirectory(r_dir, "R.java")
    if len(java_files) != 1:
      return
    r_java_file = java_files[0]
    r_java_contents = codecs.open(r_java_file, encoding='utf-8').read()

    for package in extra_packages:
      package_r_java_dir = os.path.join(r_dir, *package.split('.'))
      build_utils.MakeDirectory(package_r_java_dir)
      package_r_java_path = os.path.join(package_r_java_dir, 'R.java')
      # Reuse the main R.java verbatim, swapping only the package statement.
      new_r_java = re.sub(r'package [.\w]*;', u'package %s;' % package,
                          r_java_contents)
      codecs.open(package_r_java_path, 'w', encoding='utf-8').write(new_r_java)
  else:
    if len(extra_packages) != len(extra_r_text_files):
      raise Exception('Need one R.txt file per extra package')

    r_txt_file = os.path.join(r_dir, 'R.txt')
    if not os.path.exists(r_txt_file):
      return

    # Map of (resource_type, name) -> Entry.
    # Contains the correct values for resources.
    all_resources = {}
    for entry in _ParseTextSymbolsFile(r_txt_file):
      all_resources[(entry.resource_type, entry.name)] = entry

    # Map of package_name->resource_type->entry
    resources_by_package = (
        collections.defaultdict(lambda: collections.defaultdict(list)))
    # Build the R.java files using each package's R.txt file, but replacing
    # each entry's placeholder value with correct values from all_resources.
    for package, r_text_file in zip(extra_packages, extra_r_text_files):
      if not os.path.exists(r_text_file):
        continue
      if package in resources_by_package:
        raise Exception(('Package name "%s" appeared twice. All '
                         'android_resources() targets must use unique package '
                         'names, or no package name at all.') % package)
      resources_by_type = resources_by_package[package]
      # The sub-R.txt files have the wrong values at this point. Read them to
      # figure out which entries belong to them, but use the values from the
      # main R.txt file.
      # NOTE(review): raises KeyError if a sub-R.txt entry is absent from the
      # main R.txt.
      for entry in _ParseTextSymbolsFile(r_text_file):
        entry = all_resources[(entry.resource_type, entry.name)]
        resources_by_type[entry.resource_type].append(entry)

    for package, resources_by_type in resources_by_package.iteritems():
      package_r_java_dir = os.path.join(r_dir, *package.split('.'))
      build_utils.MakeDirectory(package_r_java_dir)
      package_r_java_path = os.path.join(package_r_java_dir, 'R.java')
      java_file_contents = _CreateExtraRJavaFile(
          package, resources_by_type, shared_resources)
      with open(package_r_java_path, 'w') as f:
        f.write(java_file_contents)
+
+
def _ParseTextSymbolsFile(path):
  """Parses an aapt-generated R.txt file into a list of TextSymbolsEntry."""
  # Lines look like "int drawable foo 0x7f020000" or
  # "int[] styleable FooBar { 0x..., ... }".
  line_re = re.compile(r'(int(?:\[\])?) (\w+) (\w+) (.+)$')
  entries = []
  with open(path) as f:
    for line in f:
      match = line_re.match(line)
      if not match:
        raise Exception('Unexpected line in R.txt: %s' % line)
      entries.append(TextSymbolsEntry(*match.groups()))
  return entries
+
+
def _CreateExtraRJavaFile(package, resources_by_type, shared_resources):
  """Generates the contents of a R.java file.

  Args:
    package: Java package name to declare in the generated file.
    resources_by_type: Map of resource_type -> list of entries exposing
      java_type, name and value attributes.
    shared_resources: If true, the generated fields are non-final and an
      onResourcesLoaded(packageId) hook is emitted that rewrites the top
      byte of every resource ID at runtime.

  Returns:
    The generated Java source as a string.
  """
  # NOTE(review): Template is presumably jinja2 (trim_blocks/lstrip_blocks are
  # jinja2 options) imported near the top of this file — confirm.
  template = Template("""/* AUTO-GENERATED FILE.  DO NOT MODIFY. */

package {{ package }};

public final class R {
    {% for resource_type in resources %}
    public static final class {{ resource_type }} {
        {% for e in resources[resource_type] %}
        {% if shared_resources %}
        public static {{ e.java_type }} {{ e.name }} = {{ e.value }};
        {% else %}
        public static final {{ e.java_type }} {{ e.name }} = {{ e.value }};
        {% endif %}
        {% endfor %}
    }
    {% endfor %}
    {% if shared_resources %}
    public static void onResourcesLoaded(int packageId) {
        {% for resource_type in resources %}
        {% for e in resources[resource_type] %}
        {% if e.java_type == 'int[]' %}
        for(int i = 0; i < {{ e.resource_type }}.{{ e.name }}.length; ++i) {
            {{ e.resource_type }}.{{ e.name }}[i] =
                    ({{ e.resource_type }}.{{ e.name }}[i] & 0x00ffffff)
                    | (packageId << 24);
        }
        {% else %}
        {{ e.resource_type }}.{{ e.name }} =
                ({{ e.resource_type }}.{{ e.name }} & 0x00ffffff)
                | (packageId << 24);
        {% endif %}
        {% endfor %}
        {% endfor %}
    }
    {% endif %}
}
""", trim_blocks=True, lstrip_blocks=True)

  return template.render(package=package, resources=resources_by_type,
                         shared_resources=shared_resources)
+
+
def CrunchDirectory(aapt, input_dir, output_dir):
  """Crunches the images in input_dir and its subdirectories into output_dir.

  If an image is already optimized, crunching often increases image size. In
  this case, the crunched image is overwritten with the original image.
  """
  build_utils.CheckOutput(
      [aapt,
       'crunch',
       '-C', output_dir,
       '-S', input_dir,
       '--ignore-assets', build_utils.AAPT_IGNORE_PATTERN],
      stderr_filter=FilterCrunchStderr,
      fail_func=DidCrunchFail)

  # Replace any crunched file that grew with its original. 9-patches are
  # exempt: they must stay crunched to display correctly.
  for root, _, names in os.walk(output_dir):
    for name in names:
      if name.endswith('.9.png'):
        continue
      if not name.endswith('.png'):
        raise Exception('Unexpected file in crunched dir: ' + name)
      crunched_path = os.path.join(root, name)
      original_path = os.path.join(
          input_dir, os.path.relpath(crunched_path, output_dir))
      if os.path.getsize(original_path) < os.path.getsize(crunched_path):
        shutil.copyfile(original_path, crunched_path)
+
+
def FilterCrunchStderr(stderr):
  """Filters out lines from aapt crunch's stderr that can safely be ignored."""
  # Known non-error libpng warning; see http://crbug.com/364355.
  ignored = ('libpng warning: iCCP: Not recognizing known sRGB profile that '
             'has been edited')
  kept_lines = [line for line in stderr.splitlines(True)
                if ignored not in line]
  return ''.join(kept_lines)
+
+
def DidCrunchFail(returncode, stderr):
  """Determines whether aapt crunch failed from its return code and output.

  Because aapt's return code cannot be trusted, any output to stderr is
  an indication that aapt has failed (http://crbug.com/314885).

  Returns:
    True if the crunch is considered failed, else False.
  """
  # Normalize to a real bool: the old code returned the raw stderr string in
  # the stderr-nonempty case, even though it is documented as a predicate.
  # Truthiness is unchanged, so callers using `if DidCrunchFail(...)` behave
  # identically.
  return returncode != 0 or bool(stderr)
+
+
def ZipResources(resource_dirs, zip_path):
  """Zips the files under resource_dirs into zip_path.

  Python's zipfile cannot replace an entry (it would just append a duplicate
  name), so first collect every file into a dict — letting later directories
  override earlier ones — and only then write the zip.
  """
  files_to_zip = {}
  for res_dir in resource_dirs:
    for root, _, filenames in os.walk(res_dir):
      rel_dir = os.path.relpath(root, res_dir)
      for filename in filenames:
        if rel_dir == '.':
          archive_path = filename
        else:
          archive_path = os.path.join(rel_dir, filename)
        files_to_zip[archive_path] = os.path.join(root, filename)
  build_utils.DoZip(files_to_zip.iteritems(), zip_path)
+
+
def CombineZips(zip_files, output_path):
  """Merges zip_files into one zip, prefixing each entry with its zip's index.

  When packaging resources, if the top-level directories in the zip file are
  of the form 0, 1, ..., then each subdirectory will be passed to aapt as a
  resources directory. While some resources just clobber others (image files,
  etc), other resources (particularly .xml files) need to be more
  intelligently merged. That merging is left up to aapt.
  """
  def _PrefixWithZipIndex(name, src_zip):
    return '%d/%s' % (zip_files.index(src_zip), name)

  build_utils.MergeZips(output_path, zip_files,
                        path_transform=_PrefixWithZipIndex)
+
+
def _OnStaleMd5(options):
  """Runs the resource pipeline: v14 generation, aapt package, extra R.java
  files, png crunching, and zipping of the results.

  Invoked only when the md5 staleness check decides inputs changed. Writes
  options.resource_zip_out, and optionally all_resources_zip_out, R_dir or
  srcjar_out, and r_text_out.
  """
  aapt = options.aapt_path
  with build_utils.TempDir() as temp_dir:
    deps_dir = os.path.join(temp_dir, 'deps')
    build_utils.MakeDirectory(deps_dir)
    v14_dir = os.path.join(temp_dir, 'v14')
    build_utils.MakeDirectory(v14_dir)

    gen_dir = os.path.join(temp_dir, 'gen')
    build_utils.MakeDirectory(gen_dir)

    input_resource_dirs = options.resource_dirs

    # Generate API-14-compatible variants of newer resources unless skipped.
    if not options.v14_skip:
      for resource_dir in input_resource_dirs:
        generate_v14_compatible_resources.GenerateV14Resources(
            resource_dir,
            v14_dir)

    # Extract each dependency resource zip into its own subdirectory of
    # deps_dir; duplicate basenames would silently merge, so fail loudly.
    dep_zips = options.dependencies_res_zips
    dep_subdirs = []
    for z in dep_zips:
      subdir = os.path.join(deps_dir, os.path.basename(z))
      if os.path.exists(subdir):
        raise Exception('Resource zip name conflict: ' + os.path.basename(z))
      build_utils.ExtractAll(z, path=subdir)
      dep_subdirs.append(subdir)

    # Generate R.java. This R.java contains non-final constants and is used only
    # while compiling the library jar (e.g. chromium_content.jar). When building
    # an apk, a new R.java file with the correct resource -> ID mappings will be
    # generated by merging the resources from all libraries and the main apk
    # project.
    package_command = [aapt,
                       'package',
                       '-m',
                       '-M', options.android_manifest,
                       '--auto-add-overlay',
                       '--no-version-vectors',
                       '-I', options.android_sdk_jar,
                       '--output-text-symbols', gen_dir,
                       '-J', gen_dir,
                       '--ignore-assets', build_utils.AAPT_IGNORE_PATTERN]

    for d in input_resource_dirs:
      package_command += ['-S', d]

    for d in dep_subdirs:
      package_command += ['-S', d]

    if options.non_constant_id:
      package_command.append('--non-constant-id')
    if options.custom_package:
      package_command += ['--custom-package', options.custom_package]
    if options.proguard_file:
      package_command += ['-G', options.proguard_file]
    if options.shared_resources:
      package_command.append('--shared-lib')
    if options.app_as_shared_lib:
      package_command.append('--app-as-shared-lib')
    build_utils.CheckOutput(package_command, print_stderr=False)

    # Produce per-package R.java copies for any extra packages.
    if options.extra_res_packages:
      CreateExtraRJavaFiles(
          gen_dir,
          options.extra_res_packages,
          options.extra_r_text_files,
          options.shared_resources or options.app_as_shared_lib,
          options.include_all_resources)

    # This is the list of directories with resources to put in the final .zip
    # file. The order of these is important so that crunched/v14 resources
    # override the normal ones.
    zip_resource_dirs = input_resource_dirs + [v14_dir]

    base_crunch_dir = os.path.join(temp_dir, 'crunch')

    # Crunch image resources. This shrinks png files and is necessary for
    # 9-patch images to display correctly. 'aapt crunch' accepts only a single
    # directory at a time and deletes everything in the output directory.
    for idx, input_dir in enumerate(input_resource_dirs):
      crunch_dir = os.path.join(base_crunch_dir, str(idx))
      build_utils.MakeDirectory(crunch_dir)
      zip_resource_dirs.append(crunch_dir)
      CrunchDirectory(aapt, input_dir, crunch_dir)

    ZipResources(zip_resource_dirs, options.resource_zip_out)

    if options.all_resources_zip_out:
      CombineZips([options.resource_zip_out] + dep_zips,
                  options.all_resources_zip_out)

    # GYP uses R_dir (a directory of generated sources); GN uses srcjar_out.
    if options.R_dir:
      build_utils.DeleteDirectory(options.R_dir)
      shutil.copytree(gen_dir, options.R_dir)
    else:
      build_utils.ZipDir(options.srcjar_out, gen_dir)

    if options.r_text_out:
      r_text_path = os.path.join(gen_dir, 'R.txt')
      if os.path.exists(r_text_path):
        shutil.copyfile(r_text_path, options.r_text_out)
      else:
        # aapt produced no R.txt (e.g. no resources); emit an empty file so
        # the declared output always exists.
        open(options.r_text_out, 'w').close()
+
+
def main(args):
  """Entry point: computes inputs/outputs and regenerates resources if stale."""
  args = build_utils.ExpandFileArgs(args)
  options = _ParseArgs(args)

  output_paths = [
      path for path in (
          options.resource_zip_out,
          options.all_resources_zip_out,
          options.proguard_file,
          options.r_text_out,
          options.srcjar_out,
      ) if path
  ]

  # List python deps in input_strings rather than input_paths since the
  # contents of them does not change what gets written to the depsfile.
  input_strings = options.extra_res_packages + [
      options.app_as_shared_lib,
      options.custom_package,
      options.include_all_resources,
      options.non_constant_id,
      options.shared_resources,
      options.v14_skip,
  ]

  input_paths = [
      options.aapt_path,
      options.android_manifest,
      options.android_sdk_jar,
  ]
  input_paths.extend(options.dependencies_res_zips)
  input_paths.extend(
      path for path in options.extra_r_text_files if os.path.exists(path))

  resource_names = []
  for resource_dir in options.resource_dirs:
    for resource_file in build_utils.FindInDirectory(resource_dir, '*'):
      input_paths.append(resource_file)
      resource_names.append(os.path.relpath(resource_file, resource_dir))

  # Resource filenames matter to the output, so add them to strings as well.
  # This matters if a file is renamed but not changed (http://crbug.com/597126).
  input_strings.extend(sorted(resource_names))

  build_utils.CallAndWriteDepfileIfStale(
      lambda: _OnStaleMd5(options),
      options,
      input_paths=input_paths,
      input_strings=input_strings,
      output_paths=output_paths,
      # TODO(agrieve): Remove R_dir when it's no longer used (used only by GYP).
      force=options.R_dir)
+
+
+if __name__ == '__main__':
+  main(sys.argv[1:])
diff --git a/build/android/gyp/proguard.py b/build/android/gyp/proguard.py
new file mode 100755
index 0000000..d019350
--- /dev/null
+++ b/build/android/gyp/proguard.py
@@ -0,0 +1,77 @@
+#!/usr/bin/env python
+#
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import optparse
+import os
+import sys
+
+from util import build_utils
+from util import proguard_util
+
+
def _ParseOptions(args):
  """Parses the command line and returns an options object with a flattened
  classpath list."""
  parser = optparse.OptionParser()
  build_utils.AddDepfileOption(parser)
  parser.add_option('--proguard-path',
                    help='Path to the proguard executable.')
  parser.add_option('--input-paths',
                    help='Paths to the .jar files proguard should run on.')
  parser.add_option('--output-path', help='Path to the generated .jar file.')
  parser.add_option('--proguard-configs',
                    help='Paths to proguard configuration files.')
  parser.add_option('--mapping', help='Path to proguard mapping to apply.')
  parser.add_option('--is-test', action='store_true',
      help='If true, extra proguard options for instrumentation tests will be '
      'added.')
  parser.add_option('--tested-apk-info', help='Path to the proguard .info file '
      'for the tested apk')
  parser.add_option('--classpath', action='append',
                    help='Classpath for proguard.')
  parser.add_option('--stamp', help='Path to touch on success.')
  parser.add_option('--verbose', '-v', action='store_true',
                    help='Print all proguard output')

  options, _ = parser.parse_args(args)

  # --classpath may be repeated, each value a gyp-style list; flatten them.
  options.classpath = [
      entry
      for arg in options.classpath
      for entry in build_utils.ParseGypList(arg)
  ]

  return options
+
+
def main(args):
  """Builds a proguard command and runs it when its inputs are stale."""
  args = build_utils.ExpandFileArgs(args)
  options = _ParseOptions(args)

  builder = proguard_util.ProguardCmdBuilder(options.proguard_path)
  builder.injars(build_utils.ParseGypList(options.input_paths))
  builder.configs(build_utils.ParseGypList(options.proguard_configs))
  builder.outjar(options.output_path)

  if options.mapping:
    builder.mapping(options.mapping)
  if options.tested_apk_info:
    builder.tested_apk_info(options.tested_apk_info)

  # De-duplicate classpath entries before handing them to proguard.
  builder.libraryjars(list(set(options.classpath)))
  builder.verbose(options.verbose)

  build_utils.CallAndWriteDepfileIfStale(
      builder.CheckOutput,
      options,
      input_paths=builder.GetInputs(),
      input_strings=builder.build(),
      output_paths=[options.output_path])
+
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv[1:]))
diff --git a/build/android/gyp/push_libraries.py b/build/android/gyp/push_libraries.py
new file mode 100755
index 0000000..3dae6f0
--- /dev/null
+++ b/build/android/gyp/push_libraries.py
@@ -0,0 +1,84 @@
+#!/usr/bin/env python
+#
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Pushes native libraries to a device.
+
+"""
+
+import optparse
+import os
+import sys
+
+from util import build_device
+from util import build_utils
+from util import md5_check
+
# Make //build/android importable (for devil_chromium and pylib below) when
# this script is run from build/android/gyp/.
BUILD_ANDROID_DIR = os.path.abspath(
    os.path.join(os.path.dirname(__file__), os.pardir))
sys.path.append(BUILD_ANDROID_DIR)
+
+import devil_chromium
+from pylib import constants
+
def DoPush(options):
  """Pushes each changed native library to the configured build device."""
  libraries = build_utils.ParseGypList(options.libraries)

  device = build_device.GetBuildDeviceFromPath(
      options.build_device_configuration)
  if not device:
    return

  serial_number = device.GetSerialNumber()
  # Mutable cell so the nested closure can clear it after the first mkdir.
  needs_directory = [True]
  for lib in libraries:
    device_path = os.path.join(options.device_dir, lib)
    host_path = os.path.join(options.libraries_dir, lib)

    def _PushLibrary():
      if needs_directory:
        device.RunShellCommand('mkdir -p ' + options.device_dir)
        needs_directory[:] = []  # Directory now exists; don't mkdir again.
      device.PushChangedFiles([(os.path.abspath(host_path), device_path)])

    record_path = '%s.%s.push.md5.stamp' % (host_path, serial_number)
    md5_check.CallAndRecordIfStale(
        _PushLibrary,
        record_path=record_path,
        input_paths=[host_path],
        input_strings=[device_path])
+
+
def main(args):
  """Parses arguments and pushes the requested libraries to the device.

  Touches --stamp on success when given.
  """
  args = build_utils.ExpandFileArgs(args)
  parser = optparse.OptionParser()
  parser.add_option('--libraries-dir',
      help='Directory that contains stripped libraries.')
  parser.add_option('--device-dir',
      help='Device directory to push the libraries to.')
  parser.add_option('--libraries',
      help='List of native libraries.')
  parser.add_option('--stamp', help='Path to touch on success.')
  parser.add_option('--build-device-configuration',
      help='Path to build device configuration.')
  parser.add_option('--output-directory',
      help='The output directory.')
  options, _ = parser.parse_args(args)

  # Bug fix: the old list was ['libraries', 'device_dir', 'libraries'] —
  # 'libraries' duplicated and 'libraries_dir' never validated, so a missing
  # --libraries-dir was not diagnosed until a later crash.
  required_options = ['libraries', 'device_dir', 'libraries_dir']
  build_utils.CheckOptions(options, parser, required=required_options)

  devil_chromium.Initialize(
      output_directory=os.path.abspath(options.output_directory))

  DoPush(options)

  if options.stamp:
    build_utils.Touch(options.stamp)
+
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv[1:]))
diff --git a/build/android/gyp/strip_library_for_device.py b/build/android/gyp/strip_library_for_device.py
new file mode 100755
index 0000000..9e2daae
--- /dev/null
+++ b/build/android/gyp/strip_library_for_device.py
@@ -0,0 +1,61 @@
+#!/usr/bin/env python
+#
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import optparse
+import os
+import sys
+
+from util import build_utils
+
+
def StripLibrary(android_strip, android_strip_args, library_path, output_path):
  """Strips library_path into output_path, skipping if the output is fresh."""
  if not build_utils.IsTimeStale(output_path, [library_path]):
    return
  build_utils.CheckOutput(
      [android_strip] + android_strip_args + ['-o', output_path, library_path])
+
+
def main(args):
  """Strips each listed library into --stripped-libraries-dir."""
  args = build_utils.ExpandFileArgs(args)

  parser = optparse.OptionParser()
  build_utils.AddDepfileOption(parser)

  parser.add_option('--android-strip',
      help='Path to the toolchain\'s strip binary')
  parser.add_option('--android-strip-arg', action='append',
      help='Argument to be passed to strip')
  parser.add_option('--libraries-dir',
      help='Directory for un-stripped libraries')
  parser.add_option('--stripped-libraries-dir',
      help='Directory for stripped libraries')
  parser.add_option('--libraries',
      help='List of libraries to strip')
  parser.add_option('--stamp', help='Path to touch on success')

  options, _ = parser.parse_args(args)

  libraries = build_utils.ParseGypList(options.libraries)

  build_utils.MakeDirectory(options.stripped_libraries_dir)

  for library in libraries:
    # --libraries-dir may contain several comma-separated search paths; pick
    # the first that holds the library. NOTE(review): if none exists, the last
    # candidate falls through and strip fails on it — presumably intentional
    # fail-late behavior; confirm.
    for base_path in options.libraries_dir.split(','):
      unstripped_path = os.path.join(base_path, library)
      if os.path.exists(unstripped_path):
        break
    stripped_path = os.path.join(options.stripped_libraries_dir, library)
    StripLibrary(options.android_strip, options.android_strip_arg,
                 unstripped_path, stripped_path)

  if options.stamp:
    build_utils.Touch(options.stamp)
+
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv[1:]))
diff --git a/build/android/gyp/test/BUILD.gn b/build/android/gyp/test/BUILD.gn
new file mode 100644
index 0000000..2deac1d
--- /dev/null
+++ b/build/android/gyp/test/BUILD.gn
@@ -0,0 +1,13 @@
import("//build/config/android/rules.gni")

# Small test target exercising the java_library template.
java_library("hello_world_java") {
  java_files = [ "java/org/chromium/helloworld/HelloWorldPrinter.java" ]
}

# Runnable test target exercising the java_binary template; its main class
# prints via :hello_world_java's HelloWorldPrinter.
java_binary("hello_world") {
  deps = [
    ":hello_world_java",
  ]
  java_files = [ "java/org/chromium/helloworld/HelloWorldMain.java" ]
  main_class = "org.chromium.helloworld.HelloWorldMain"
}
diff --git a/build/android/gyp/test/java/org/chromium/helloworld/HelloWorldMain.java b/build/android/gyp/test/java/org/chromium/helloworld/HelloWorldMain.java
new file mode 100644
index 0000000..10860d8
--- /dev/null
+++ b/build/android/gyp/test/java/org/chromium/helloworld/HelloWorldMain.java
@@ -0,0 +1,15 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package org.chromium.helloworld;
+
public class HelloWorldMain {
    /**
     * Entry point. If an argument is given, exits immediately with that
     * argument parsed as the exit code; otherwise prints the hello-world
     * message via {@link HelloWorldPrinter}.
     */
    public static void main(String[] args) {
        if (args.length > 0) {
            System.exit(Integer.parseInt(args[0]));
        }
        HelloWorldPrinter.print();
    }
}
+
diff --git a/build/android/gyp/test/java/org/chromium/helloworld/HelloWorldPrinter.java b/build/android/gyp/test/java/org/chromium/helloworld/HelloWorldPrinter.java
new file mode 100644
index 0000000..b09673e
--- /dev/null
+++ b/build/android/gyp/test/java/org/chromium/helloworld/HelloWorldPrinter.java
@@ -0,0 +1,12 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package org.chromium.helloworld;
+
public class HelloWorldPrinter {
    /** Prints "Hello, world!" to stdout. */
    public static void print() {
        System.out.println("Hello, world!");
    }
}
+
diff --git a/build/android/gyp/touch.py b/build/android/gyp/touch.py
new file mode 100755
index 0000000..7b4375e
--- /dev/null
+++ b/build/android/gyp/touch.py
@@ -0,0 +1,16 @@
+#!/usr/bin/env python
+#
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import sys
+
+from util import build_utils
+
def main(argv):
  """Touches (creates or updates the mtime of) every path on the command line."""
  for path in argv[1:]:
    build_utils.Touch(path)
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv))
diff --git a/build/android/gyp/util/__init__.py b/build/android/gyp/util/__init__.py
new file mode 100644
index 0000000..727e987
--- /dev/null
+++ b/build/android/gyp/util/__init__.py
@@ -0,0 +1,4 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
diff --git a/build/android/gyp/util/build_device.py b/build/android/gyp/util/build_device.py
new file mode 100644
index 0000000..83aa0d5
--- /dev/null
+++ b/build/android/gyp/util/build_device.py
@@ -0,0 +1,108 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+""" A simple device interface for build steps.
+
+"""
+
+import logging
+import os
+import re
+import sys
+
+from util import build_utils
+
+from devil.android import device_errors
+from devil.android import device_utils
+from devil.android.sdk import adb_wrapper
+
+
def GetAttachedDevices():
  """Returns the serial numbers of all adb-attached devices."""
  return [device.GetDeviceSerial()
          for device in adb_wrapper.AdbWrapper.Devices()]
+
+
class BuildDevice(object):
  """Thin wrapper over devil's DeviceUtils for use by build steps."""

  def __init__(self, configuration):
    # configuration is a dict as produced by GetConfigurationForDevice().
    self.id = configuration['id']
    self.description = configuration['description']
    self.install_metadata = configuration['install_metadata']
    self.device = device_utils.DeviceUtils(self.id)

  def RunShellCommand(self, *args, **kwargs):
    return self.device.RunShellCommand(*args, **kwargs)

  def PushChangedFiles(self, *args, **kwargs):
    return self.device.PushChangedFiles(*args, **kwargs)

  def GetSerialNumber(self):
    return self.id

  def Install(self, *args, **kwargs):
    return self.device.Install(*args, **kwargs)

  def InstallSplitApk(self, *args, **kwargs):
    return self.device.InstallSplitApk(*args, **kwargs)

  def GetInstallMetadata(self, apk_package):
    """Returns the ls -l line for apk_package's install, or None.

    Matches lines like:
    -rw-r--r-- system   system    7376582 2013-04-19 16:34 \
      org.chromium.chrome.apk
    -rw-r--r-- system   system    7376582 2013-04-19 16:34 \
      org.chromium.chrome-1.apk
    """
    # Bug fix: the old pattern left both the package name and the '.apk' dot
    # unescaped, so '.' acted as regex wildcards and could match unrelated
    # package names. re.escape() restricts it to the literal package.
    pattern = re.compile(r'.*%s(-[0-9]*)?\.apk$' % re.escape(apk_package))
    for line in self.install_metadata:
      if pattern.match(line):
        return line
    return None
+
+
def GetConfigurationForDevice(device_id):
  """Probes a device and returns (configuration, is_online, has_root).

  configuration is None when the device is offline; otherwise it is a dict
  with 'id', 'description' (the ro.build.description property) and
  'install_metadata' (the 'ls -l /data/app' output lines).
  """
  device = device_utils.DeviceUtils(device_id)
  configuration = None
  has_root = False
  is_online = device.IsOnline()
  if is_online:
    # Single shell round-trip: the app listing, with the build description as
    # the last output line.
    cmd = 'ls -l /data/app; getprop ro.build.description'
    cmd_output = device.RunShellCommand(cmd)
    has_root = not 'Permission denied' in cmd_output[0]
    if not has_root:
      # Disable warning log messages from EnableRoot()
      logging.getLogger().disabled = True
      try:
        device.EnableRoot()
        has_root = True
      except device_errors.CommandFailedError:
        has_root = False
      finally:
        logging.getLogger().disabled = False
      # Re-run the command: the first listing may have failed without root.
      cmd_output = device.RunShellCommand(cmd)

    configuration = {
        'id': device_id,
        'description': cmd_output[-1],
        'install_metadata': cmd_output[:-1],
      }
  return configuration, is_online, has_root
+
+
def WriteConfigurations(configurations, path):
  """Persists device configurations as JSON, keeping the file's mtime stable."""
  # Currently we only support installing to the first device.
  build_utils.WriteJson(configurations[:1], path, only_if_changed=True)
+
+
def ReadConfigurations(path):
  """Loads the device configuration list written by WriteConfigurations()."""
  return build_utils.ReadJson(path)
+
+
def GetBuildDevice(configurations):
  """Wraps the single expected configuration in a BuildDevice."""
  assert len(configurations) == 1
  return BuildDevice(configurations[0])
+
+
def GetBuildDeviceFromPath(path):
  """Reads the configuration file at path and returns a BuildDevice.

  Returns None when the file contains no device configurations.
  """
  configurations = ReadConfigurations(path)
  if not configurations:
    return None
  # Reuse the parsed list; the old code re-read and re-parsed the file a
  # second time inside the return expression.
  return GetBuildDevice(configurations)
+
diff --git a/build/android/gyp/util/build_utils.py b/build/android/gyp/util/build_utils.py
new file mode 100644
index 0000000..7c32bc1
--- /dev/null
+++ b/build/android/gyp/util/build_utils.py
@@ -0,0 +1,527 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
import ast
import contextlib
import errno
import fnmatch
import json
import os
import pipes
import re
import shlex
import shutil
import stat
import subprocess
import sys
import tempfile
import zipfile
+
+# Some clients do not add //build/android/gyp to PYTHONPATH.
+import md5_check  # pylint: disable=relative-import
+
+sys.path.append(os.path.join(os.path.dirname(__file__), os.pardir, os.pardir))
+from pylib.constants import host_paths
+
# Location of the in-tree colorama library (used by scripts importing this
# module for colored terminal output).
COLORAMA_ROOT = os.path.join(host_paths.DIR_SOURCE_ROOT,
                             'third_party', 'colorama', 'src')
# aapt should ignore OWNERS files in addition the default ignore pattern.
AAPT_IGNORE_PATTERN = ('!OWNERS:!.svn:!.git:!.ds_store:!*.scc:.*:<dir>_*:' +
                       '!CVS:!thumbs.db:!picasa.ini:!*~:!*.d.stamp')
# Fixed timestamp and file mode used when writing zip entries so that output
# zips are bit-identical across builds (hermetic).
_HERMETIC_TIMESTAMP = (2001, 1, 1, 0, 0, 0)
# -rw-r--r-- stored in the high 16 bits of ZipInfo.external_attr.
_HERMETIC_FILE_ATTR = (0644 << 16L)
+
+
@contextlib.contextmanager
def TempDir():
  """Context manager yielding a fresh temp directory, deleted on exit."""
  path = tempfile.mkdtemp()
  try:
    yield path
  finally:
    shutil.rmtree(path)
+
+
def MakeDirectory(dir_path):
  """Creates dir_path and any missing parents; no-op if it already exists.

  Unlike bare os.makedirs(), an already-existing directory is not an error.
  Bug fix: the old bare `except OSError: pass` also swallowed real failures
  such as EACCES or ENOSPC; only EEXIST is ignored now.
  """
  try:
    os.makedirs(dir_path)
  except OSError as e:
    if e.errno != errno.EEXIST:
      raise
+
+
def DeleteDirectory(dir_path):
  """Recursively deletes dir_path; silently does nothing if it is missing."""
  if not os.path.exists(dir_path):
    return
  shutil.rmtree(dir_path)
+
+
def Touch(path, fail_if_missing=False):
  """Creates path (and parent dirs) if needed and bumps its mtime.

  Raises when fail_if_missing is true and the path does not already exist.
  """
  if fail_if_missing and not os.path.exists(path):
    raise Exception(path + ' doesn\'t exist.')

  # Inlined directory creation (same semantics as MakeDirectory): ignore an
  # already-existing parent.
  try:
    os.makedirs(os.path.dirname(path))
  except OSError:
    pass
  with open(path, 'a'):
    os.utime(path, None)
+
+
def FindInDirectory(directory, filename_filter):
  """Returns paths under directory whose basename matches filename_filter."""
  matches = []
  for root, _, names in os.walk(directory):
    matches.extend(os.path.join(root, name)
                   for name in fnmatch.filter(names, filename_filter))
  return matches
+
+
def FindInDirectories(directories, filename_filter):
  """Like FindInDirectory(), but searches several directories in order."""
  return [match
          for directory in directories
          for match in FindInDirectory(directory, filename_filter)]
+
+
def ParseGnList(gn_string):
  """Parses a GN list literal (e.g. '["a", "b"]') into a Python list.

  TODO(brettw) bug 573132: This doesn't handle GN escaping properly, so any
  weird characters like $ or backslashes in the strings will be corrupted.

  The code should import build/gn_helpers.py and use GNValueParser.ParseList.
  As of this writing, though, there is a CastShell build script that sends
  JSON through this function, and using correct GN parsing corrupts that.
  We need to be consistent about passing either JSON or GN lists through
  this function.
  """
  return ast.literal_eval(gn_string)
+
+
def ParseGypList(gyp_string):
  """Parses a gyp-style (shell-quoted) list string into a Python list.

  The ninja generator doesn't support $ in strings, so ## is used to
  represent $ and is decoded here.
  TODO(cjhopman): Remove when
  https://code.google.com/p/gyp/issues/detail?id=327 is addressed.
  """
  decoded = gyp_string.replace('##', '$')
  if decoded.startswith('['):
    # Bracketed strings are GN/JSON-style lists.
    return ParseGnList(decoded)
  return shlex.split(decoded)
+
+
def CheckOptions(options, parser, required=None):
  """Calls parser.error() for each required option that is missing (None)."""
  for option_name in required or ():
    if getattr(options, option_name) is None:
      parser.error('--%s is required' % option_name.replace('_', '-'))
+
+
def WriteJson(obj, path, only_if_changed=False):
  """Serializes obj as pretty-printed, key-sorted JSON to path.

  When only_if_changed is true and the file already holds an identical dump,
  the file is left untouched (preserving its mtime for staleness checks).
  """
  new_dump = json.dumps(obj, sort_keys=True, indent=2, separators=(',', ': '))

  if only_if_changed and os.path.exists(path):
    with open(path, 'r') as oldfile:
      if oldfile.read() == new_dump:
        return

  with open(path, 'w') as outfile:
    outfile.write(new_dump)
+
+
def ReadJson(path):
  """Parses and returns the JSON document stored at path."""
  with open(path) as jsonfile:
    return json.load(jsonfile)
+
+
class CalledProcessError(Exception):
  """Raised by CheckOutput when the child process is judged to have failed."""

  def __init__(self, cwd, args, output):
    super(CalledProcessError, self).__init__()
    self.cwd = cwd        # Directory the command was run in.
    self.args = args      # Argv list of the failed command.
    self.output = output  # Combined stdout + stderr of the command.

  def __str__(self):
    # A user should be able to simply copy and paste the command that failed
    # into their shell.
    quoted = ' '.join(pipes.quote(arg) for arg in self.args)
    copyable_command = '( cd {}; {} )'.format(os.path.abspath(self.cwd),
                                              quoted)
    return 'Command failed: {}\n{}'.format(copyable_command, self.output)
+
+
def CheckOutput(args, cwd=None, env=None,
                print_stdout=False, print_stderr=True,
                stdout_filter=None,
                stderr_filter=None,
                fail_func=lambda returncode, stderr: returncode != 0):
  """Runs a command like subprocess.check_output(), with nicer failures.

  stdout/stderr are optionally filtered; when fail_func(returncode, stderr)
  is true a build_utils.CalledProcessError is raised, otherwise the
  (filtered) stdout is returned.
  """
  cwd = cwd or os.getcwd()

  proc = subprocess.Popen(args, stdout=subprocess.PIPE,
                          stderr=subprocess.PIPE, cwd=cwd, env=env)
  stdout, stderr = proc.communicate()

  if stdout_filter is not None:
    stdout = stdout_filter(stdout)
  if stderr_filter is not None:
    stderr = stderr_filter(stderr)

  # Judge failure before echoing so a failed command's output is reported
  # only through the exception.
  if fail_func(proc.returncode, stderr):
    raise CalledProcessError(cwd, args, stdout + stderr)

  if print_stdout:
    sys.stdout.write(stdout)
  if print_stderr:
    sys.stderr.write(stderr)
  return stdout
+
+
def GetModifiedTime(path):
  """Returns the effective mtime of path.

  For a symlink, the modified time should be the greater of the link's
  modified time and the modified time of the target.
  """
  link_mtime = os.lstat(path).st_mtime
  target_mtime = os.stat(path).st_mtime
  return max(link_mtime, target_mtime)
+
+
def IsTimeStale(output, inputs):
  """True if output is missing or not newer than every path in inputs."""
  if not os.path.exists(output):
    return True

  # For symlinks, use the greater of the link's and the target's mtime
  # (same semantics as GetModifiedTime, inlined here).
  def _Mtime(path):
    return max(os.lstat(path).st_mtime, os.stat(path).st_mtime)

  output_time = _Mtime(output)
  return any(_Mtime(i) > output_time for i in inputs)
+
+
+def IsDeviceReady():
+  """Returns whether `adb get-state` reports an attached, ready device."""
+  device_state = CheckOutput(['adb', 'get-state'])
+  return device_state.strip() == 'device'
+
+
+def CheckZipPath(name):
+  if os.path.normpath(name) != name:
+    raise Exception('Non-canonical zip path: %s' % name)
+  if os.path.isabs(name):
+    raise Exception('Absolute zip path: %s' % name)
+
+
+def IsSymlink(zip_file, name):
+  """Returns whether the zip entry |name| in |zip_file| is a symlink.
+
+  Args:
+    zip_file: An open zipfile.ZipFile instance.
+    name: Entry name within the archive.
+  """
+  zi = zip_file.getinfo(name)
+
+  # The two high-order bytes of ZipInfo.external_attr represent
+  # UNIX permissions and file type bits.
+  # (The 16L long literal makes this Python 2 only.)
+  return stat.S_ISLNK(zi.external_attr >> 16L)
+
+
+def ExtractAll(zip_path, path=None, no_clobber=True, pattern=None,
+               predicate=None):
+  """Extracts all regular files (and symlinks) from |zip_path| into |path|.
+
+  Args:
+    zip_path: Path of the zip archive to extract.
+    path: Destination directory; defaults to the current directory. Created
+        if it does not exist.
+    no_clobber: If True, raise when an output file already exists.
+    pattern: Optional fnmatch glob; only matching entries are extracted.
+    predicate: Optional callable(name) -> bool; entries failing it are skipped.
+  """
+  if path is None:
+    path = os.getcwd()
+  elif not os.path.exists(path):
+    MakeDirectory(path)
+
+  with zipfile.ZipFile(zip_path) as z:
+    for name in z.namelist():
+      # Directory entries end with '/'; there is nothing to extract for them.
+      if name.endswith('/'):
+        continue
+      if pattern is not None:
+        if not fnmatch.fnmatch(name, pattern):
+          continue
+      if predicate and not predicate(name):
+        continue
+      # Rejects absolute or non-canonical entry paths (zip-slip protection).
+      CheckZipPath(name)
+      if no_clobber:
+        output_path = os.path.join(path, name)
+        if os.path.exists(output_path):
+          raise Exception(
+              'Path already exists from zip: %s %s %s'
+              % (zip_path, name, output_path))
+      if IsSymlink(z, name):
+        # Recreate symlinks (entry data is the link target) rather than
+        # materializing the target's contents.
+        dest = os.path.join(path, name)
+        MakeDirectory(os.path.dirname(dest))
+        os.symlink(z.read(name), dest)
+      else:
+        z.extract(name, path)
+
+
+def AddToZipHermetic(zip_file, zip_path, src_path=None, data=None,
+                     compress=None):
+  """Adds a file to the given ZipFile with a hard-coded modified time.
+
+  Args:
+    zip_file: ZipFile instance to add the file to.
+    zip_path: Destination path within the zip file.
+    src_path: Path of the source file. Mutually exclusive with |data|.
+    data: File data as a string.
+    compress: Whether to enable compression. Default is take from ZipFile
+        constructor.
+  """
+  assert (src_path is None) != (data is None), (
+      '|src_path| and |data| are mutually exclusive.')
+  CheckZipPath(zip_path)
+  # Fixed timestamp/permissions keep zip output bit-for-bit reproducible.
+  # _HERMETIC_TIMESTAMP and _HERMETIC_FILE_ATTR are module-level constants
+  # defined elsewhere in this file.
+  zipinfo = zipfile.ZipInfo(filename=zip_path, date_time=_HERMETIC_TIMESTAMP)
+  zipinfo.external_attr = _HERMETIC_FILE_ATTR
+
+  if src_path and os.path.islink(src_path):
+    zipinfo.filename = zip_path
+    zipinfo.external_attr |= stat.S_IFLNK << 16L # mark as a symlink
+    # Store the link target as the entry's data.
+    zip_file.writestr(zipinfo, os.readlink(src_path))
+    return
+
+  if src_path:
+    # NOTE: file() is Python 2 only, and this opens in text mode —
+    # presumably fine for the build inputs involved; verify for binary data.
+    with file(src_path) as f:
+      data = f.read()
+
+  # zipfile will deflate even when it makes the file bigger. To avoid
+  # growing files, disable compression at an arbitrary cut off point.
+  if len(data) < 16:
+    compress = False
+
+  # None converts to ZIP_STORED, when passed explicitly rather than the
+  # default passed to the ZipFile constructor.
+  compress_type = zip_file.compression
+  if compress is not None:
+    compress_type = zipfile.ZIP_DEFLATED if compress else zipfile.ZIP_STORED
+  zip_file.writestr(zipinfo, data, compress_type)
+
+
+def DoZip(inputs, output, base_dir=None):
+  """Creates a zip file from a list of files.
+
+  Args:
+    inputs: A list of paths to zip, or a list of (zip_path, fs_path) tuples.
+    output: Destination .zip file.
+    base_dir: Prefix to strip from inputs.
+  """
+  input_tuples = []
+  for tup in inputs:
+    if isinstance(tup, basestring):
+      tup = (os.path.relpath(tup, base_dir), tup)
+    input_tuples.append(tup)
+
+  # Sort by zip path to ensure stable zip ordering.
+  input_tuples.sort(key=lambda tup: tup[0])
+  with zipfile.ZipFile(output, 'w') as outfile:
+    for zip_path, fs_path in input_tuples:
+      AddToZipHermetic(outfile, zip_path, src_path=fs_path)
+
+
+def ZipDir(output, base_dir):
+  """Creates a zip file from a directory."""
+  inputs = []
+  for root, _, files in os.walk(base_dir):
+    for f in files:
+      inputs.append(os.path.join(root, f))
+  DoZip(inputs, output, base_dir)
+
+
+def MatchesGlob(path, filters):
+  """Returns whether the given path matches any of the given glob patterns."""
+  return filters and any(fnmatch.fnmatch(path, f) for f in filters)
+
+
+def MergeZips(output, inputs, exclude_patterns=None, path_transform=None):
+  """Merges the |inputs| zip files into a single |output| zip.
+
+  Args:
+    output: Destination zip path.
+    inputs: List of input zip paths. Earlier inputs win on duplicate names.
+    exclude_patterns: Optional list of glob patterns; matching entries are
+        dropped.
+    path_transform: Optional callable(name, in_file) -> destination name.
+  """
+  path_transform = path_transform or (lambda p, z: p)
+  added_names = set()
+
+  with zipfile.ZipFile(output, 'w') as out_zip:
+    for in_file in inputs:
+      with zipfile.ZipFile(in_file, 'r') as in_zip:
+        # HACK: pokes a private zipfile attribute, presumably to disable CRC
+        # verification on read() — relies on CPython internals; verify when
+        # upgrading the Python version.
+        in_zip._expected_crc = None
+        for info in in_zip.infolist():
+          # Ignore directories.
+          if info.filename[-1] == '/':
+            continue
+          dst_name = path_transform(info.filename, in_file)
+          already_added = dst_name in added_names
+          if not already_added and not MatchesGlob(dst_name, exclude_patterns):
+            AddToZipHermetic(out_zip, dst_name, data=in_zip.read(info))
+            added_names.add(dst_name)
+
+
+def PrintWarning(message):
+  """Prints |message| prefixed with 'WARNING:' (Python 2 print statement)."""
+  print 'WARNING: ' + message
+
+
+def PrintBigWarning(message):
+  """Prints |message| between two highly visible banner lines."""
+  print '*****     ' * 8
+  PrintWarning(message)
+  print '*****     ' * 8
+
+
+def GetSortedTransitiveDependencies(top, deps_func):
+  """Gets the list of all transitive dependencies in sorted order.
+
+  There should be no cycles in the dependency graph (a cycle would make the
+  topological-sort loop below spin forever).
+
+  Args:
+    top: a list of the top level nodes
+    deps_func: A function that takes a node and returns its direct dependencies.
+  Returns:
+    A list of all transitive dependencies of nodes in top, in order (a node will
+    appear in the list at a higher index than all of its dependencies).
+  """
+  def Node(dep):
+    # Pairs a node with its direct dependency set. Note deps_func is called
+    # again here for each node (it was already called during discovery), so it
+    # is assumed to be cheap and deterministic.
+    return (dep, deps_func(dep))
+
+  # First: find all deps
+  unchecked_deps = list(top)
+  all_deps = set(top)
+  while unchecked_deps:
+    dep = unchecked_deps.pop()
+    new_deps = deps_func(dep).difference(all_deps)
+    unchecked_deps.extend(new_deps)
+    all_deps = all_deps.union(new_deps)
+
+  # Then: simple, slow topological sort.
+  sorted_deps = []
+  unsorted_deps = dict(map(Node, all_deps))
+  while unsorted_deps:
+    # Emit every node whose remaining dependencies are all already sorted.
+    # (Python 2: .items() returns a list, so deleting while iterating is safe.)
+    for library, dependencies in unsorted_deps.items():
+      if not dependencies.intersection(unsorted_deps.keys()):
+        sorted_deps.append(library)
+        del unsorted_deps[library]
+
+  return sorted_deps
+
+
+def GetPythonDependencies():
+  """Gets the paths of imported non-system python modules.
+
+  A path is assumed to be a "system" import if it is outside of chromium's
+  src/. The paths will be relative to the current directory.
+  """
+  module_paths = (m.__file__ for m in sys.modules.itervalues()
+                  if m is not None and hasattr(m, '__file__'))
+
+  abs_module_paths = map(os.path.abspath, module_paths)
+
+  # host_paths is presumably imported at the top of this file, with
+  # DIR_SOURCE_ROOT pointing at chromium's src/ — verify against the imports.
+  assert os.path.isabs(host_paths.DIR_SOURCE_ROOT)
+  non_system_module_paths = [
+      p for p in abs_module_paths if p.startswith(host_paths.DIR_SOURCE_ROOT)]
+  def ConvertPycToPy(s):
+    # Report the .py source file rather than its compiled .pyc artifact.
+    if s.endswith('.pyc'):
+      return s[:-1]
+    return s
+
+  non_system_module_paths = map(ConvertPycToPy, non_system_module_paths)
+  non_system_module_paths = map(os.path.relpath, non_system_module_paths)
+  return sorted(set(non_system_module_paths))
+
+
+def AddDepfileOption(parser):
+  # TODO(agrieve): Get rid of this once we've moved to argparse.
+  if hasattr(parser, 'add_option'):
+    func = parser.add_option
+  else:
+    func = parser.add_argument
+  func('--depfile',
+       help='Path to depfile. Must be specified as the action\'s first output.')
+
+
+def WriteDepfile(path, dependencies):
+  with open(path, 'w') as depfile:
+    depfile.write(path)
+    depfile.write(': ')
+    depfile.write(' '.join(dependencies))
+    depfile.write('\n')
+
+
+def ExpandFileArgs(args):
+  """Replaces file-arg placeholders in args.
+
+  These placeholders have the form:
+    @FileArg(filename:key1:key2:...:keyn)
+
+  The value of such a placeholder is calculated by reading 'filename' as json.
+  And then extracting the value at [key1][key2]...[keyn].
+
+  Note: This intentionally does not return the list of files that appear in such
+  placeholders. An action that uses file-args *must* know the paths of those
+  files prior to the parsing of the arguments (typically by explicitly listing
+  them in the action's inputs in build files).
+  """
+  new_args = list(args)
+  file_jsons = dict()
+  r = re.compile('@FileArg\((.*?)\)')
+  for i, arg in enumerate(args):
+    match = r.search(arg)
+    if not match:
+      continue
+
+    if match.end() != len(arg):
+      raise Exception('Unexpected characters after FileArg: ' + arg)
+
+    lookup_path = match.group(1).split(':')
+    file_path = lookup_path[0]
+    if not file_path in file_jsons:
+      file_jsons[file_path] = ReadJson(file_path)
+
+    expansion = file_jsons[file_path]
+    for k in lookup_path[1:]:
+      expansion = expansion[k]
+
+    new_args[i] = arg[:match.start()] + str(expansion)
+
+  return new_args
+
+
+def CallAndWriteDepfileIfStale(function, options, record_path=None,
+                               input_paths=None, input_strings=None,
+                               output_paths=None, force=False,
+                               pass_changes=False,
+                               depfile_deps=None):
+  """Wraps md5_check.CallAndRecordIfStale() and also writes dep & stamp files.
+
+  Depfiles and stamp files are automatically added to output_paths when present
+  in the |options| argument. They are then created after |function| is called.
+
+  By default, only python dependencies are added to the depfile. If there are
+  other input paths that are not captured by GN deps, then they should be listed
+  in depfile_deps. It's important to write paths to the depfile that are already
+  captured by GN deps since GN args can cause GN deps to change, and such
+  changes are not immediately reflected in depfiles (http://crbug.com/589311).
+
+  Args:
+    function: Called (possibly with a Changes instance) when stale.
+    options: Parsed options; .depfile and .stamp are honored when present.
+    record_path, input_paths, input_strings, output_paths, force, pass_changes:
+      Forwarded to md5_check.CallAndRecordIfStale().
+    depfile_deps: Extra paths to list in the depfile.
+  """
+  if not output_paths:
+    raise Exception('At least one output_path must be specified.')
+  input_paths = list(input_paths or [])
+  input_strings = list(input_strings or [])
+  output_paths = list(output_paths or [])
+
+  python_deps = None
+  if hasattr(options, 'depfile') and options.depfile:
+    python_deps = GetPythonDependencies()
+    # List python deps in input_strings rather than input_paths since the
+    # contents of them does not change what gets written to the depfile.
+    input_strings += python_deps
+    output_paths += [options.depfile]
+
+  # A stamp file lets actions with no natural output still be tracked.
+  stamp_file = hasattr(options, 'stamp') and options.stamp
+  if stamp_file:
+    output_paths += [stamp_file]
+
+  def on_stale_md5(changes):
+    # Runs only when md5_check decides the target is stale.
+    args = (changes,) if pass_changes else ()
+    function(*args)
+    if python_deps is not None:
+      all_depfile_deps = list(python_deps)
+      if depfile_deps:
+        all_depfile_deps.extend(depfile_deps)
+      WriteDepfile(options.depfile, all_depfile_deps)
+    if stamp_file:
+      Touch(stamp_file)
+
+  md5_check.CallAndRecordIfStale(
+      on_stale_md5,
+      record_path=record_path,
+      input_paths=input_paths,
+      input_strings=input_strings,
+      output_paths=output_paths,
+      force=force,
+      pass_changes=True)
+
diff --git a/build/android/gyp/util/md5_check.py b/build/android/gyp/util/md5_check.py
new file mode 100644
index 0000000..7dac2e4
--- /dev/null
+++ b/build/android/gyp/util/md5_check.py
@@ -0,0 +1,402 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import difflib
+import hashlib
+import itertools
+import json
+import os
+import sys
+import zipfile
+
+
+# When set and a difference is detected, a diff of what changed is printed.
+# Enabled via the PRINT_BUILD_EXPLANATIONS environment variable.
+PRINT_EXPLANATIONS = int(os.environ.get('PRINT_BUILD_EXPLANATIONS', 0))
+
+# An escape hatch that causes all targets to be rebuilt.
+# Enabled via the FORCE_REBUILD environment variable.
+_FORCE_REBUILD = int(os.environ.get('FORCE_REBUILD', 0))
+
+
+def CallAndRecordIfStale(
+    function, record_path=None, input_paths=None, input_strings=None,
+    output_paths=None, force=False, pass_changes=False):
+  """Calls function if outputs are stale.
+
+  Outputs are considered stale if:
+  - any output_paths are missing, or
+  - the contents of any file within input_paths has changed, or
+  - the contents of input_strings has changed.
+
+  To debug which files are out-of-date, set the environment variable:
+      PRINT_BUILD_EXPLANATIONS=1
+
+  Args:
+    function: The function to call.
+    record_path: Path to record metadata.
+      Defaults to output_paths[0] + '.md5.stamp'
+    input_paths: List of paths to calculate an md5 sum on.
+    input_strings: List of strings to record verbatim.
+    output_paths: List of output paths.
+    force: Whether to treat outputs as missing regardless of whether they
+      actually are.
+    pass_changes: Whether to pass a Changes instance to |function|.
+  """
+  assert record_path or output_paths
+  input_paths = input_paths or []
+  input_strings = input_strings or []
+  output_paths = output_paths or []
+  record_path = record_path or output_paths[0] + '.md5.stamp'
+
+  assert record_path.endswith('.stamp'), (
+      'record paths must end in \'.stamp\' so that they are easy to find '
+      'and delete')
+
+  new_metadata = _Metadata()
+  new_metadata.AddStrings(input_strings)
+
+  for path in input_paths:
+    # Zip inputs get per-entry tracking so changes can name touched subpaths.
+    if _IsZipFile(path):
+      entries = _ExtractZipEntries(path)
+      new_metadata.AddZipFile(path, entries)
+    else:
+      new_metadata.AddFile(path, _Md5ForPath(path))
+
+  old_metadata = None
+  force = force or _FORCE_REBUILD
+  missing_outputs = [x for x in output_paths if force or not os.path.exists(x)]
+  # When outputs are missing, don't bother gathering change information.
+  if not missing_outputs and os.path.exists(record_path):
+    with open(record_path, 'r') as jsonfile:
+      try:
+        old_metadata = _Metadata.FromFile(jsonfile)
+      except:  # pylint: disable=bare-except
+        pass  # Not yet using new file format.
+
+  changes = Changes(old_metadata, new_metadata, force, missing_outputs)
+  if not changes.HasChanges():
+    return
+
+  if PRINT_EXPLANATIONS:
+    print '=' * 80
+    print 'Target is stale: %s' % record_path
+    print changes.DescribeDifference()
+    print '=' * 80
+
+  args = (changes,) if pass_changes else ()
+  function(*args)
+
+  # Persist the new record only after |function| succeeds, so a failed build
+  # stays stale and is retried next time.
+  with open(record_path, 'w') as f:
+    new_metadata.ToFile(f)
+
+
+class Changes(object):
+  """Provides an API for querying what changed between runs."""
+
+  def __init__(self, old_metadata, new_metadata, force, missing_outputs):
+    # old_metadata is None when no usable previous record exists.
+    self.old_metadata = old_metadata
+    self.new_metadata = new_metadata
+    self.force = force
+    self.missing_outputs = missing_outputs
+
+  def _GetOldTag(self, path, subpath=None):
+    # Returns None/falsy both when old metadata is absent and when the entry
+    # is not present in it.
+    return self.old_metadata and self.old_metadata.GetTag(path, subpath)
+
+  def HasChanges(self):
+    """Returns whether any changes exist."""
+    return (self.force or
+            not self.old_metadata or
+            self.old_metadata.StringsMd5() != self.new_metadata.StringsMd5() or
+            self.old_metadata.FilesMd5() != self.new_metadata.FilesMd5())
+
+  def AddedOrModifiedOnly(self):
+    """Returns whether the only changes were from added or modified (sub)files.
+
+    No missing outputs, no removed paths/subpaths.
+    """
+    if (self.force or
+        not self.old_metadata or
+        self.old_metadata.StringsMd5() != self.new_metadata.StringsMd5()):
+      return False
+    if any(self.IterRemovedPaths()):
+      return False
+    for path in self.IterModifiedPaths():
+      if any(self.IterRemovedSubpaths(path)):
+        return False
+    return True
+
+  def IterAddedPaths(self):
+    """Generator for paths that were added."""
+    for path in self.new_metadata.IterPaths():
+      if self._GetOldTag(path) is None:
+        yield path
+
+  def IterAddedSubpaths(self, path):
+    """Generator for paths that were added within the given zip file."""
+    for subpath in self.new_metadata.IterSubpaths(path):
+      if self._GetOldTag(path, subpath) is None:
+        yield subpath
+
+  def IterRemovedPaths(self):
+    """Generator for paths that were removed."""
+    if self.old_metadata:
+      for path in self.old_metadata.IterPaths():
+        if self.new_metadata.GetTag(path) is None:
+          yield path
+
+  def IterRemovedSubpaths(self, path):
+    """Generator for paths that were removed within the given zip file."""
+    if self.old_metadata:
+      for subpath in self.old_metadata.IterSubpaths(path):
+        if self.new_metadata.GetTag(path, subpath) is None:
+          yield subpath
+
+  def IterModifiedPaths(self):
+    """Generator for paths whose contents have changed."""
+    for path in self.new_metadata.IterPaths():
+      old_tag = self._GetOldTag(path)
+      new_tag = self.new_metadata.GetTag(path)
+      if old_tag is not None and old_tag != new_tag:
+        yield path
+
+  def IterModifiedSubpaths(self, path):
+    """Generator for paths within a zip file whose contents have changed."""
+    for subpath in self.new_metadata.IterSubpaths(path):
+      old_tag = self._GetOldTag(path, subpath)
+      new_tag = self.new_metadata.GetTag(path, subpath)
+      if old_tag is not None and old_tag != new_tag:
+        yield subpath
+
+  def IterChangedPaths(self):
+    """Generator for all changed paths (added/removed/modified)."""
+    return itertools.chain(self.IterRemovedPaths(),
+                           self.IterModifiedPaths(),
+                           self.IterAddedPaths())
+
+  def IterChangedSubpaths(self, path):
+    """Generator for paths within a zip that were added/removed/modified."""
+    return itertools.chain(self.IterRemovedSubpaths(path),
+                           self.IterModifiedSubpaths(path),
+                           self.IterAddedSubpaths(path))
+
+  def DescribeDifference(self):
+    """Returns a human-readable description of what changed."""
+    # Checks are ordered from coarse (force/missing) to detailed (per-file).
+    if self.force:
+      return 'force=True'
+    elif self.missing_outputs:
+      return 'Outputs do not exist:\n  ' + '\n  '.join(self.missing_outputs)
+    elif self.old_metadata is None:
+      return 'Previous stamp file not found.'
+
+    if self.old_metadata.StringsMd5() != self.new_metadata.StringsMd5():
+      ndiff = difflib.ndiff(self.old_metadata.GetStrings(),
+                            self.new_metadata.GetStrings())
+      changed = [s for s in ndiff if not s.startswith(' ')]
+      return 'Input strings changed:\n  ' + '\n  '.join(changed)
+
+    if self.old_metadata.FilesMd5() == self.new_metadata.FilesMd5():
+      return "There's no difference."
+
+    lines = []
+    lines.extend('Added: ' + p for p in self.IterAddedPaths())
+    lines.extend('Removed: ' + p for p in self.IterRemovedPaths())
+    for path in self.IterModifiedPaths():
+      lines.append('Modified: ' + path)
+      lines.extend('  -> Subpath added: ' + p
+                   for p in self.IterAddedSubpaths(path))
+      lines.extend('  -> Subpath removed: ' + p
+                   for p in self.IterRemovedSubpaths(path))
+      lines.extend('  -> Subpath modified: ' + p
+                   for p in self.IterModifiedSubpaths(path))
+    if lines:
+      return 'Input files changed:\n  ' + '\n  '.join(lines)
+    return 'I have no idea what changed (there is a bug).'
+
+
+class _Metadata(object):
+  """Data model for tracking change metadata."""
+  # Schema:
+  # {
+  #   "files-md5": "VALUE",
+  #   "strings-md5": "VALUE",
+  #   "input-files": [
+  #     {
+  #       "path": "path.jar",
+  #       "tag": "{MD5 of entries}",
+  #       "entries": [
+  #         { "path": "org/chromium/base/Foo.class", "tag": "{CRC32}" }, ...
+  #       ]
+  #     }, {
+  #       "path": "path.txt",
+  #       "tag": "{MD5}",
+  #     }
+  #   ],
+  #   "input-strings": ["a", "b", ...],
+  # }
+  def __init__(self):
+    # Aggregate md5s are computed lazily and cached; once computed, the
+    # metadata is frozen (see _AssertNotQueried()).
+    self._files_md5 = None
+    self._strings_md5 = None
+    self._files = []
+    self._strings = []
+    # Map of (path, subpath) -> entry. Created upon first call to _GetEntry().
+    self._file_map = None
+
+  @classmethod
+  def FromFile(cls, fileobj):
+    """Returns a _Metadata initialized from a file object."""
+    ret = cls()
+    obj = json.load(fileobj)
+    ret._files_md5 = obj['files-md5']
+    ret._strings_md5 = obj['strings-md5']
+    ret._files = obj['input-files']
+    ret._strings = obj['input-strings']
+    return ret
+
+  def ToFile(self, fileobj):
+    """Serializes metadata to the given file object as JSON."""
+    obj = {
+        "files-md5": self.FilesMd5(),
+        "strings-md5": self.StringsMd5(),
+        "input-files": self._files,
+        "input-strings": self._strings,
+    }
+    json.dump(obj, fileobj, indent=2)
+
+  def _AssertNotQueried(self):
+    # Additions are disallowed once any aggregate md5 or the lookup map has
+    # been computed, since those caches would go stale.
+    assert self._files_md5 is None
+    assert self._strings_md5 is None
+    assert self._file_map is None
+
+  def AddStrings(self, values):
+    # Values are stringified verbatim; order matters for StringsMd5().
+    self._AssertNotQueried()
+    self._strings.extend(str(v) for v in values)
+
+  def AddFile(self, path, tag):
+    """Adds metadata for a non-zip file.
+
+    Args:
+      path: Path to the file.
+      tag: A short string representative of the file contents.
+    """
+    self._AssertNotQueried()
+    self._files.append({
+        'path': path,
+        'tag': tag,
+    })
+
+  def AddZipFile(self, path, entries):
+    """Adds metadata for a zip file.
+
+    Args:
+      path: Path to the file.
+      entries: List of (subpath, tag) tuples for entries within the zip.
+    """
+    self._AssertNotQueried()
+    # The file-level tag folds in every entry's subpath and tag.
+    tag = _ComputeInlineMd5(itertools.chain((e[0] for e in entries),
+                                            (e[1] for e in entries)))
+    self._files.append({
+        'path': path,
+        'tag': tag,
+        'entries': [{"path": e[0], "tag": e[1]} for e in entries],
+    })
+
+  def GetStrings(self):
+    """Returns the list of input strings."""
+    return self._strings
+
+  def FilesMd5(self):
+    """Lazily computes and returns the aggregate md5 of input files."""
+    if self._files_md5 is None:
+      # Omit paths from md5 since temporary files have random names.
+      self._files_md5 = _ComputeInlineMd5(
+          self.GetTag(p) for p in sorted(self.IterPaths()))
+    return self._files_md5
+
+  def StringsMd5(self):
+    """Lazily computes and returns the aggregate md5 of input strings."""
+    if self._strings_md5 is None:
+      self._strings_md5 = _ComputeInlineMd5(self._strings)
+    return self._strings_md5
+
+  def _GetEntry(self, path, subpath=None):
+    """Returns the JSON entry for the given path / subpath."""
+    if self._file_map is None:
+      # Build the lookup map once; top-level files key on (path, None).
+      self._file_map = {}
+      for entry in self._files:
+        self._file_map[(entry['path'], None)] = entry
+        for subentry in entry.get('entries', ()):
+          self._file_map[(entry['path'], subentry['path'])] = subentry
+    return self._file_map.get((path, subpath))
+
+  def GetTag(self, path, subpath=None):
+    """Returns the tag for the given path / subpath, or None if absent."""
+    ret = self._GetEntry(path, subpath)
+    return ret and ret['tag']
+
+  def IterPaths(self):
+    """Returns a generator for all top-level paths."""
+    return (e['path'] for e in self._files)
+
+  def IterSubpaths(self, path):
+    """Returns a generator for all subpaths in the given zip.
+
+    If the given path is not a zip file or doesn't exist, returns an empty
+    iterable.
+    """
+    outer_entry = self._GetEntry(path)
+    if not outer_entry:
+      return ()
+    subentries = outer_entry.get('entries', [])
+    return (entry['path'] for entry in subentries)
+
+
+def _UpdateMd5ForFile(md5, path, block_size=2**16):
+  with open(path, 'rb') as infile:
+    while True:
+      data = infile.read(block_size)
+      if not data:
+        break
+      md5.update(data)
+
+
+def _UpdateMd5ForDirectory(md5, dir_path):
+  for root, _, files in os.walk(dir_path):
+    for f in files:
+      _UpdateMd5ForFile(md5, os.path.join(root, f))
+
+
+def _Md5ForPath(path):
+  md5 = hashlib.md5()
+  if os.path.isdir(path):
+    _UpdateMd5ForDirectory(md5, path)
+  else:
+    _UpdateMd5ForFile(md5, path)
+  return md5.hexdigest()
+
+
+def _ComputeInlineMd5(iterable):
+  """Computes the md5 of the concatenated parameters."""
+  md5 = hashlib.md5()
+  for item in iterable:
+    # Relies on Python 2 semantics where str(item) yields a byte string
+    # that hashlib accepts directly; Python 3 would require .encode().
+    md5.update(str(item))
+  return md5.hexdigest()
+
+
+def _IsZipFile(path):
+  """Returns whether to treat the given file as a zip file."""
+  # ijar doesn't set the CRC32 field.
+  if path.endswith('.interface.jar'):
+    return False
+  return path[-4:] in ('.zip', '.apk', '.jar') or path.endswith('.srcjar')
+
+
+def _ExtractZipEntries(path):
+  """Returns a list of (path, CRC32) of all files within |path|."""
+  entries = []
+  with zipfile.ZipFile(path) as zip_file:
+    for zip_info in zip_file.infolist():
+      # Skip directories and empty files.
+      if zip_info.CRC:
+        entries.append(
+            (zip_info.filename, zip_info.CRC + zip_info.compress_type))
+  return entries
diff --git a/build/android/gyp/util/md5_check_test.py b/build/android/gyp/util/md5_check_test.py
new file mode 100755
index 0000000..312d4a9
--- /dev/null
+++ b/build/android/gyp/util/md5_check_test.py
@@ -0,0 +1,144 @@
+#!/usr/bin/env python
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import fnmatch
+import tempfile
+import unittest
+import zipfile
+
+import md5_check # pylint: disable=W0403
+
+
+def _WriteZipFile(path, entries):
+  with zipfile.ZipFile(path, 'w') as zip_file:
+    for subpath, data in entries:
+      zip_file.writestr(subpath, data)
+
+
+class TestMd5Check(unittest.TestCase):
+  """Exercises md5_check.CallAndRecordIfStale() through a scripted sequence."""
+
+  def setUp(self):
+    # Set by the MarkCalled callbacks below.
+    self.called = False
+    self.changes = None
+
+  def testCallAndRecordIfStale(self):
+    input_strings = ['string1', 'string2']
+    input_file1 = tempfile.NamedTemporaryFile(suffix='.txt')
+    input_file2 = tempfile.NamedTemporaryFile(suffix='.zip')
+    file1_contents = 'input file 1'
+    input_file1.write(file1_contents)
+    input_file1.flush()
+    # Test out empty zip file to start.
+    _WriteZipFile(input_file2.name, [])
+    input_files = [input_file1.name, input_file2.name]
+
+    record_path = tempfile.NamedTemporaryFile(suffix='.stamp')
+
+    # Helper: runs CallAndRecordIfStale once and asserts whether the callback
+    # fired, optionally matching the change description (fnmatch glob) and
+    # the AddedOrModifiedOnly() result.
+    def CheckCallAndRecord(should_call, message, force=False,
+                           outputs_specified=False, outputs_missing=False,
+                           expected_changes=None, added_or_modified_only=None):
+      output_paths = None
+      if outputs_specified:
+        output_file1 = tempfile.NamedTemporaryFile()
+        if outputs_missing:
+          output_file1.close()  # Gets deleted on close().
+        output_paths = [output_file1.name]
+
+      self.called = False
+      self.changes = None
+      # Callback arity must match pass_changes below.
+      if expected_changes or added_or_modified_only is not None:
+        def MarkCalled(changes):
+          self.called = True
+          self.changes = changes
+      else:
+        def MarkCalled():
+          self.called = True
+
+      md5_check.CallAndRecordIfStale(
+          MarkCalled,
+          record_path=record_path.name,
+          input_paths=input_files,
+          input_strings=input_strings,
+          output_paths=output_paths,
+          force=force,
+          pass_changes=(expected_changes or added_or_modified_only) is not None)
+      self.assertEqual(should_call, self.called, message)
+      if expected_changes:
+        description = self.changes.DescribeDifference()
+        self.assertTrue(fnmatch.fnmatch(description, expected_changes),
+                        'Expected %s to match %s' % (
+                        repr(description), repr(expected_changes)))
+      if should_call and added_or_modified_only is not None:
+        self.assertEqual(added_or_modified_only,
+                         self.changes.AddedOrModifiedOnly())
+
+    # Each call below mutates one input dimension and verifies staleness.
+    CheckCallAndRecord(True, 'should call when record doesn\'t exist',
+                       expected_changes='Previous stamp file not found.',
+                       added_or_modified_only=False)
+    CheckCallAndRecord(False, 'should not call when nothing changed')
+    CheckCallAndRecord(False, 'should not call when nothing changed #2',
+                       outputs_specified=True, outputs_missing=False)
+    CheckCallAndRecord(True, 'should call when output missing',
+                       outputs_specified=True, outputs_missing=True,
+                       expected_changes='Outputs do not exist:*',
+                       added_or_modified_only=False)
+    CheckCallAndRecord(True, force=True, message='should call when forced',
+                       expected_changes='force=True',
+                       added_or_modified_only=False)
+
+    input_file1.write('some more input')
+    input_file1.flush()
+    CheckCallAndRecord(True, 'changed input file should trigger call',
+                       expected_changes='*Modified: %s' % input_file1.name,
+                       added_or_modified_only=True)
+
+    input_files = input_files[::-1]
+    CheckCallAndRecord(False, 'reordering of inputs shouldn\'t trigger call')
+
+    input_files = input_files[:1]
+    CheckCallAndRecord(True, 'removing file should trigger call',
+                       expected_changes='*Removed: %s' % input_file1.name,
+                       added_or_modified_only=False)
+
+    input_files.append(input_file1.name)
+    CheckCallAndRecord(True, 'added input file should trigger call',
+                       expected_changes='*Added: %s' % input_file1.name,
+                       added_or_modified_only=True)
+
+    input_strings[0] = input_strings[0] + ' a bit longer'
+    CheckCallAndRecord(True, 'changed input string should trigger call',
+                       expected_changes='*Input strings changed*',
+                       added_or_modified_only=False)
+
+    # Unlike file inputs, string order is significant.
+    input_strings = input_strings[::-1]
+    CheckCallAndRecord(True, 'reordering of string inputs should trigger call',
+                       expected_changes='*Input strings changed*')
+
+    input_strings = input_strings[:1]
+    CheckCallAndRecord(True, 'removing a string should trigger call')
+
+    input_strings.append('a brand new string')
+    CheckCallAndRecord(True, 'added input string should trigger call')
+
+    # Zip-entry (subpath) level change detection.
+    _WriteZipFile(input_file2.name, [('path/1.txt', '1')])
+    CheckCallAndRecord(True, 'added subpath should trigger call',
+                       expected_changes='*Modified: %s*Subpath added: %s' % (
+                                        input_file2.name, 'path/1.txt'),
+                       added_or_modified_only=True)
+    _WriteZipFile(input_file2.name, [('path/1.txt', '2')])
+    CheckCallAndRecord(True, 'changed subpath should trigger call',
+                       expected_changes='*Modified: %s*Subpath modified: %s' % (
+                                        input_file2.name, 'path/1.txt'),
+                       added_or_modified_only=True)
+    CheckCallAndRecord(False, 'should not call when nothing changed')
+
+    _WriteZipFile(input_file2.name, [])
+    CheckCallAndRecord(True, 'removed subpath should trigger call',
+                       expected_changes='*Modified: %s*Subpath removed: %s' % (
+                                        input_file2.name, 'path/1.txt'),
+                       added_or_modified_only=False)
+
+
+# Allows running the tests directly: `python md5_check_test.py`.
+if __name__ == '__main__':
+  unittest.main()
diff --git a/build/android/gyp/util/proguard_util.py b/build/android/gyp/util/proguard_util.py
new file mode 100644
index 0000000..f315979
--- /dev/null
+++ b/build/android/gyp/util/proguard_util.py
@@ -0,0 +1,189 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import os
+import re
+from util import build_utils
+
+
+class _ProguardOutputFilter(object):
+  """ProGuard outputs boring stuff to stdout (proguard version, jar path, etc)
+  as well as interesting stuff (notes, warnings, etc). If stdout is entirely
+  boring, this class suppresses the output.
+  """
+
+  # Matches the uninteresting banner/progress lines ProGuard always prints.
+  IGNORE_RE = re.compile(
+      r'(?:Pro.*version|Note:|Reading|Preparing|.*:.*(?:MANIFEST\.MF|\.empty))')
+
+  def __init__(self):
+    # Whether the most recent non-indented line was ignored; indented
+    # continuation lines inherit this decision.
+    self._last_line_ignored = False
+
+  def __call__(self, output):
+    """Returns |output| with ignorable lines (and their continuations) removed."""
+    ret = []
+    for line in output.splitlines(True):
+      # Non-indented lines start a new message; indented lines continue it.
+      if not line.startswith(' '):
+        self._last_line_ignored = bool(self.IGNORE_RE.match(line))
+      elif 'You should check if you need to specify' in line:
+        self._last_line_ignored = True
+
+      if not self._last_line_ignored:
+        ret.append(line)
+    return ''.join(ret)
+
+
+class ProguardCmdBuilder(object):
+  """Incrementally assembles a ProGuard command line, then runs it.
+
+  Each setter may be called at most once, and only before build() is first
+  called (enforced by the |self._cmd is None| asserts).
+  """
+  def __init__(self, proguard_jar):
+    assert os.path.exists(proguard_jar)
+    self._proguard_jar_path = proguard_jar
+    self._tested_apk_info_path = None
+    self._tested_apk_info = None
+    self._mapping = None
+    self._libraries = None
+    self._injars = None
+    self._configs = None
+    self._outjar = None
+    self._cmd = None  # Memoized command list; set by build().
+    self._verbose = False
+
+  def outjar(self, path):
+    # Path for -outjars; also the base name for the .dump/.seeds/.usage/
+    # .mapping/.info side outputs.
+    assert self._cmd is None
+    assert self._outjar is None
+    self._outjar = path
+
+  def tested_apk_info(self, tested_apk_info_path):
+    # Path to the .info json written by CheckOutput() when the tested apk
+    # was proguarded (instrumentation-test apks only).
+    assert self._cmd is None
+    assert self._tested_apk_info is None
+    self._tested_apk_info_path = tested_apk_info_path
+
+  def mapping(self, path):
+    # Existing mapping file passed via -applymapping; must exist.
+    assert self._cmd is None
+    assert self._mapping is None
+    assert os.path.exists(path), path
+    self._mapping = path
+
+  def libraryjars(self, paths):
+    # Jars passed via -libraryjars; all must exist.
+    assert self._cmd is None
+    assert self._libraries is None
+    for p in paths:
+      assert os.path.exists(p), p
+    self._libraries = paths
+
+  def injars(self, paths):
+    # Program jars passed via -injars; all must exist.
+    assert self._cmd is None
+    assert self._injars is None
+    for p in paths:
+      assert os.path.exists(p), p
+    self._injars = paths
+
+  def configs(self, paths):
+    # ProGuard config files, each passed via -include.
+    assert self._cmd is None
+    assert self._configs is None
+    for p in paths:
+      assert os.path.exists(p), p
+    self._configs = paths
+
+  def verbose(self, verbose):
+    assert self._cmd is None
+    self._verbose = verbose
+
+  def build(self):
+    """Returns the full command list, computing and caching it on first use."""
+    if self._cmd:
+      return self._cmd
+    assert self._injars is not None
+    assert self._outjar is not None
+    assert self._configs is not None
+    cmd = [
+      'java', '-jar', self._proguard_jar_path,
+      '-forceprocessing',
+    ]
+    if self._tested_apk_info_path:
+      assert len(self._configs) == 1
+      # For an instrumentation apk: reuse the tested apk's configs and
+      # mapping, and move the tested apk's inputs from program jars to
+      # library jars so they are referenced but not reprocessed.
+      tested_apk_info = build_utils.ReadJson(self._tested_apk_info_path)
+      self._configs += tested_apk_info['configs']
+      self._injars = [
+          p for p in self._injars if not p in tested_apk_info['inputs']]
+      if not self._libraries:
+        self._libraries = []
+      self._libraries += tested_apk_info['inputs']
+      self._mapping = tested_apk_info['mapping']
+      cmd += [
+        '-dontobfuscate',
+        '-dontoptimize',
+        '-dontshrink',
+        '-dontskipnonpubliclibraryclassmembers',
+      ]
+
+    if self._mapping:
+      cmd += [
+        '-applymapping', self._mapping,
+      ]
+
+    if self._libraries:
+      cmd += [
+        '-libraryjars', ':'.join(self._libraries),
+      ]
+
+    cmd += [
+      '-injars', ':'.join(self._injars)
+    ]
+
+    for config_file in self._configs:
+      cmd += ['-include', config_file]
+
+    # The output jar must be specified after inputs.
+    cmd += [
+      '-outjars', self._outjar,
+      '-dump', self._outjar + '.dump',
+      '-printseeds', self._outjar + '.seeds',
+      '-printusage', self._outjar + '.usage',
+      '-printmapping', self._outjar + '.mapping',
+    ]
+
+    if self._verbose:
+      cmd.append('-verbose')
+
+    self._cmd = cmd
+    return self._cmd
+
+  def GetInputs(self):
+    """Returns all files whose changes should trigger a re-run of ProGuard."""
+    self.build()
+    inputs = [self._proguard_jar_path] + self._configs + self._injars
+    if self._mapping:
+      inputs.append(self._mapping)
+    if self._libraries:
+      inputs += self._libraries
+    if self._tested_apk_info_path:
+      inputs += [self._tested_apk_info_path]
+    return inputs
+
+
+  def CheckOutput(self):
+    """Runs ProGuard, filtering noise, and writes the .info json for this run."""
+    self.build()
+    # Proguard will skip writing these files if they would be empty. Create
+    # empty versions of them all now so that they are updated as the build
+    # expects.
+    open(self._outjar + '.dump', 'w').close()
+    open(self._outjar + '.seeds', 'w').close()
+    open(self._outjar + '.usage', 'w').close()
+    open(self._outjar + '.mapping', 'w').close()
+    # Warning: and Error: are sent to stderr, but messages and Note: are sent
+    # to stdout.
+    stdout_filter = None
+    stderr_filter = None
+    if not self._verbose:
+      stdout_filter = _ProguardOutputFilter()
+      stderr_filter = _ProguardOutputFilter()
+    build_utils.CheckOutput(self._cmd, print_stdout=True,
+                            print_stderr=True,
+                            stdout_filter=stdout_filter,
+                            stderr_filter=stderr_filter)
+
+    # Record what went into this run so that a dependent (instrumentation)
+    # apk can reuse it via tested_apk_info().
+    this_info = {
+      'inputs': self._injars,
+      'configs': self._configs,
+      'mapping': self._outjar + '.mapping',
+    }
+
+    build_utils.WriteJson(this_info, self._outjar + '.info')
+
diff --git a/build/android/gyp/write_build_config.py b/build/android/gyp/write_build_config.py
new file mode 100755
index 0000000..e0b727b
--- /dev/null
+++ b/build/android/gyp/write_build_config.py
@@ -0,0 +1,539 @@
+#!/usr/bin/env python
+#
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Writes a build_config file.
+
+The build_config file for a target is a json file containing information about
+how to build that target based on the target's dependencies. This includes
+things like: the javac classpath, the list of android resources dependencies,
+etc. It also includes the information needed to create the build_config for
+other targets that depend on that one.
+
+Android build scripts should not refer to the build_config directly, and the
+build specification should instead pass information in using the special
+file-arg syntax (see build_utils.py:ExpandFileArgs). That syntax allows passing
+of values in a json dict in a file and looks like this:
+  --python-arg=@FileArg(build_config_path:javac:classpath)
+
+Note: If paths to input files are passed in this way, it is important that:
+  1. inputs/deps of the action ensure that the files are available the first
+  time the action runs.
+  2. Either (a) or (b)
+    a. inputs/deps ensure that the action runs whenever one of the files changes
+    b. the files are added to the action's depfile
+"""
+
+import itertools
+import optparse
+import os
+import sys
+import xml.dom.minidom
+
+from util import build_utils
+from util import md5_check
+
+import write_ordered_libraries
+
+
+# Types that should never be used as a dependency of another build config.
+_ROOT_TYPES = ('android_apk', 'deps_dex', 'java_binary', 'resource_rewriter')
+# Types that should not allow code deps to pass through.
+_RESOURCE_TYPES = ('android_assets', 'android_resources')
+
+
+class AndroidManifest(object):
+  """Thin wrapper over an AndroidManifest.xml file.
+
+  Asserts that the file contains exactly one <manifest> element.
+  """
+  def __init__(self, path):
+    self.path = path
+    dom = xml.dom.minidom.parse(path)
+    manifests = dom.getElementsByTagName('manifest')
+    assert len(manifests) == 1
+    self.manifest = manifests[0]
+
+  def GetInstrumentation(self):
+    """Returns the single <instrumentation> element, or None if absent."""
+    instrumentation_els = self.manifest.getElementsByTagName('instrumentation')
+    if len(instrumentation_els) == 0:
+      return None
+    if len(instrumentation_els) != 1:
+      raise Exception(
+          'More than one <instrumentation> element found in %s' % self.path)
+    return instrumentation_els[0]
+
+  def CheckInstrumentation(self, expected_package):
+    """Raises unless the manifest instruments exactly |expected_package|."""
+    instr = self.GetInstrumentation()
+    if not instr:
+      raise Exception('No <instrumentation> elements found in %s' % self.path)
+    instrumented_package = instr.getAttributeNS(
+        'http://schemas.android.com/apk/res/android', 'targetPackage')
+    if instrumented_package != expected_package:
+      raise Exception(
+          'Wrong instrumented package. Expected %s, got %s'
+          % (expected_package, instrumented_package))
+
+  def GetPackageName(self):
+    """Returns the manifest's 'package' attribute ('' if not set, per DOM)."""
+    return self.manifest.getAttribute('package')
+
+
+# Cache of build_config path -> parsed 'deps_info' dict, so each config file
+# is read at most once per invocation.
+dep_config_cache = {}
+def GetDepConfig(path):
+  # Returns the 'deps_info' section of the build_config json at |path|.
+  if not path in dep_config_cache:
+    dep_config_cache[path] = build_utils.ReadJson(path)['deps_info']
+  return dep_config_cache[path]
+
+
+def DepsOfType(wanted_type, configs):
+  """Returns the configs in |configs| whose 'type' equals |wanted_type|."""
+  return [c for c in configs if c['type'] == wanted_type]
+
+
+def GetAllDepsConfigsInOrder(deps_config_paths):
+  """Returns all transitive dep config paths, topologically sorted."""
+  def GetDeps(path):
+    # Direct deps of one config, as recorded in its build_config file.
+    return set(GetDepConfig(path)['deps_configs'])
+  return build_utils.GetSortedTransitiveDependencies(deps_config_paths, GetDeps)
+
+
+def ResolveGroups(configs):
+  """Returns |configs| with every 'group' config replaced (in place) by its
+  members' configs. Repeats until no groups remain, so nested groups are
+  fully expanded.
+  """
+  while True:
+    groups = DepsOfType('group', configs)
+    if not groups:
+      return configs
+    for config in groups:
+      index = configs.index(config)
+      expanded_configs = [GetDepConfig(p) for p in config['deps_configs']]
+      configs[index:index + 1] = expanded_configs
+
+
+class Deps(object):
+  def __init__(self, direct_deps_config_paths):
+    self.all_deps_config_paths = GetAllDepsConfigsInOrder(
+        direct_deps_config_paths)
+    self.direct_deps_configs = ResolveGroups(
+        [GetDepConfig(p) for p in direct_deps_config_paths])
+    self.all_deps_configs = [
+        GetDepConfig(p) for p in self.all_deps_config_paths]
+    self.direct_deps_config_paths = direct_deps_config_paths
+
+  def All(self, wanted_type=None):
+    if type is None:
+      return self.all_deps_configs
+    return DepsOfType(wanted_type, self.all_deps_configs)
+
+  def Direct(self, wanted_type=None):
+    if wanted_type is None:
+      return self.direct_deps_configs
+    return DepsOfType(wanted_type, self.direct_deps_configs)
+
+  def AllConfigPaths(self):
+    return self.all_deps_config_paths
+
+  def RemoveNonDirectDep(self, path):
+    if path in self.direct_deps_config_paths:
+      raise Exception('Cannot remove direct dep.')
+    self.all_deps_config_paths.remove(path)
+    self.all_deps_configs.remove(GetDepConfig(path))
+
+def _MergeAssets(all_assets):
+  """Merges all assets from the given deps.
+
+  Returns:
+    A tuple of lists: (compressed, uncompressed)
+    Each tuple entry is a list of "srcPath:zipPath". srcPath is the path of the
+    asset to add, and zipPath is the location within the zip (excluding assets/
+    prefix)
+  """
+  compressed = {}
+  uncompressed = {}
+  for asset_dep in all_assets:
+    entry = asset_dep['assets']
+    disable_compression = entry.get('disable_compression', False)
+    dest_map = uncompressed if disable_compression else compressed
+    other_map = compressed if disable_compression else uncompressed
+    outputs = entry.get('outputs', [])
+    # Sources without an explicit output keep their basename; Python 2's
+    # izip_longest pads the missing outputs with None.
+    for src, dest in itertools.izip_longest(entry['sources'], outputs):
+      if not dest:
+        dest = os.path.basename(src)
+      # Merge so that each path shows up in only one of the lists, and that
+      # deps of the same target override previous ones.
+      other_map.pop(dest, 0)
+      dest_map[dest] = src
+
+  def create_list(asset_map):
+    # Formats one dest->src map as the "srcPath:zipPath" strings above.
+    ret = ['%s:%s' % (src, dest) for dest, src in asset_map.iteritems()]
+    # Sort to ensure deterministic ordering.
+    ret.sort()
+    return ret
+
+  return create_list(compressed), create_list(uncompressed)
+
+
+def _FilterUnwantedDepsPaths(dep_paths, target_type):
+  """Drops dep config paths that a target of |target_type| must not use."""
+  # Don't allow root targets to be considered as a dep.
+  ret = [p for p in dep_paths if GetDepConfig(p)['type'] not in _ROOT_TYPES]
+
+  # Don't allow java libraries to cross through assets/resources.
+  if target_type in _RESOURCE_TYPES:
+    ret = [p for p in ret if GetDepConfig(p)['type'] in _RESOURCE_TYPES]
+  return ret
+
+
+def _AsInterfaceJar(jar_path):
+  # Maps foo.jar -> foo.interface.jar (replaces the trailing "jar").
+  return jar_path[:-3] + 'interface.jar'
+
+
+def main(argv):
+  """Parses options and writes one target's .build_config json (and depfile)."""
+  parser = optparse.OptionParser()
+  build_utils.AddDepfileOption(parser)
+  parser.add_option('--build-config', help='Path to build_config output.')
+  parser.add_option(
+      '--type',
+      help='Type of this target (e.g. android_library).')
+  parser.add_option(
+      '--possible-deps-configs',
+      help='List of paths for dependency\'s build_config files. Some '
+      'dependencies may not write build_config files. Missing build_config '
+      'files are handled differently based on the type of this target.')
+
+  # android_resources options
+  parser.add_option('--srcjar', help='Path to target\'s resources srcjar.')
+  parser.add_option('--resources-zip', help='Path to target\'s resources zip.')
+  parser.add_option('--r-text', help='Path to target\'s R.txt file.')
+  parser.add_option('--package-name',
+      help='Java package name for these resources.')
+  parser.add_option('--android-manifest', help='Path to android manifest.')
+  parser.add_option('--is-locale-resource', action='store_true',
+                    help='Whether it is locale resource.')
+
+  # android_assets options
+  parser.add_option('--asset-sources', help='List of asset sources.')
+  parser.add_option('--asset-renaming-sources',
+                    help='List of asset sources with custom destinations.')
+  parser.add_option('--asset-renaming-destinations',
+                    help='List of asset custom destinations.')
+  parser.add_option('--disable-asset-compression', action='store_true',
+                    help='Whether to disable asset compression.')
+
+  # java library options
+  parser.add_option('--jar-path', help='Path to target\'s jar output.')
+  parser.add_option('--supports-android', action='store_true',
+      help='Whether this library supports running on the Android platform.')
+  parser.add_option('--requires-android', action='store_true',
+      help='Whether this library requires running on the Android platform.')
+  parser.add_option('--bypass-platform-checks', action='store_true',
+      help='Bypass checks for support/require Android platform.')
+
+  # android library options
+  parser.add_option('--dex-path', help='Path to target\'s dex output.')
+
+  # native library options
+  parser.add_option('--native-libs', help='List of top-level native libs.')
+  parser.add_option('--readelf-path', help='Path to toolchain\'s readelf.')
+
+  # apk options
+  parser.add_option('--apk-path', help='Path to the target\'s apk output.')
+  parser.add_option('--incremental-apk-path',
+                    help="Path to the target's incremental apk output.")
+  parser.add_option('--incremental-install-script-path',
+                    help="Path to the target's generated incremental install "
+                    "script.")
+
+  parser.add_option('--tested-apk-config',
+      help='Path to the build config of the tested apk (for an instrumentation '
+      'test apk).')
+  parser.add_option('--proguard-enabled', action='store_true',
+      help='Whether proguard is enabled for this apk.')
+  parser.add_option('--proguard-info',
+      help='Path to the proguard .info output for this apk.')
+  parser.add_option('--has-alternative-locale-resource', action='store_true',
+      help='Whether there is alternative-locale-resource in direct deps')
+
+  options, args = parser.parse_args(argv)
+
+  if args:
+    parser.error('No positional arguments should be given.')
+
+  # Options that must be present for each target type.
+  required_options_map = {
+      'java_binary': ['build_config', 'jar_path'],
+      'java_library': ['build_config', 'jar_path'],
+      'android_assets': ['build_config'],
+      'android_resources': ['build_config', 'resources_zip'],
+      'android_apk': ['build_config', 'jar_path', 'dex_path', 'resources_zip'],
+      'deps_dex': ['build_config', 'dex_path'],
+      'resource_rewriter': ['build_config'],
+      'group': ['build_config'],
+  }
+  required_options = required_options_map.get(options.type)
+  if not required_options:
+    raise Exception('Unknown type: <%s>' % options.type)
+
+  if options.native_libs:
+    required_options.append('readelf_path')
+
+  build_utils.CheckOptions(options, parser, required_options)
+
+  if options.type == 'java_library':
+    if options.supports_android and not options.dex_path:
+      raise Exception('java_library that supports Android requires a dex path.')
+
+    if options.requires_android and not options.supports_android:
+      raise Exception(
+          '--supports-android is required when using --requires-android')
+
+  possible_deps_config_paths = build_utils.ParseGypList(
+      options.possible_deps_configs)
+
+  # Deps whose build_config does not exist are deps that did not write one
+  # (see --possible-deps-configs help); they are silently dropped here.
+  unknown_deps = [
+      c for c in possible_deps_config_paths if not os.path.exists(c)]
+
+  direct_deps_config_paths = [
+      c for c in possible_deps_config_paths if not c in unknown_deps]
+  direct_deps_config_paths = _FilterUnwantedDepsPaths(direct_deps_config_paths,
+                                                      options.type)
+
+  deps = Deps(direct_deps_config_paths)
+  all_inputs = deps.AllConfigPaths() + build_utils.GetPythonDependencies()
+
+  # Remove other locale resources if there is alternative_locale_resource in
+  # direct deps.
+  if options.has_alternative_locale_resource:
+    alternative = [r['path'] for r in deps.Direct('android_resources')
+                   if r.get('is_locale_resource')]
+    # We can only have one locale resources in direct deps.
+    if len(alternative) != 1:
+      raise Exception('The number of locale resource in direct deps is wrong %d'
+                       % len(alternative))
+    unwanted = [r['path'] for r in deps.All('android_resources')
+                if r.get('is_locale_resource') and r['path'] not in alternative]
+    for p in unwanted:
+      deps.RemoveNonDirectDep(p)
+
+
+  direct_library_deps = deps.Direct('java_library')
+  all_library_deps = deps.All('java_library')
+
+  direct_resources_deps = deps.Direct('android_resources')
+  all_resources_deps = deps.All('android_resources')
+  # Resources should be ordered with the highest-level dependency first so that
+  # overrides are done correctly.
+  all_resources_deps.reverse()
+
+  if options.type == 'android_apk' and options.tested_apk_config:
+    tested_apk_deps = Deps([options.tested_apk_config])
+    tested_apk_resources_deps = tested_apk_deps.All('android_resources')
+    all_resources_deps = [
+        d for d in all_resources_deps if not d in tested_apk_resources_deps]
+
+  # Initialize some common config.
+  config = {
+    'deps_info': {
+      'name': os.path.basename(options.build_config),
+      'path': options.build_config,
+      'type': options.type,
+      'deps_configs': direct_deps_config_paths
+    }
+  }
+  deps_info = config['deps_info']
+
+  if (options.type in ('java_binary', 'java_library') and
+      not options.bypass_platform_checks):
+    deps_info['requires_android'] = options.requires_android
+    deps_info['supports_android'] = options.supports_android
+
+    # NOTE(review): resource deps contribute whole config dicts here while
+    # library deps contribute names; the exception below prints both forms.
+    deps_require_android = (all_resources_deps +
+        [d['name'] for d in all_library_deps if d['requires_android']])
+    deps_not_support_android = (
+        [d['name'] for d in all_library_deps if not d['supports_android']])
+
+    if deps_require_android and not options.requires_android:
+      raise Exception('Some deps require building for the Android platform: ' +
+          str(deps_require_android))
+
+    if deps_not_support_android and options.supports_android:
+      raise Exception('Not all deps support the Android platform: ' +
+          str(deps_not_support_android))
+
+  if options.type in ('java_binary', 'java_library', 'android_apk'):
+    javac_classpath = [c['jar_path'] for c in direct_library_deps]
+    java_full_classpath = [c['jar_path'] for c in all_library_deps]
+    deps_info['resources_deps'] = [c['path'] for c in all_resources_deps]
+    deps_info['jar_path'] = options.jar_path
+    if options.type == 'android_apk' or options.supports_android:
+      deps_info['dex_path'] = options.dex_path
+    if options.type == 'android_apk':
+      deps_info['apk_path'] = options.apk_path
+      deps_info['incremental_apk_path'] = options.incremental_apk_path
+      deps_info['incremental_install_script_path'] = (
+          options.incremental_install_script_path)
+
+    # Classpath values filled in below (after applying tested_apk_config).
+    config['javac'] = {}
+
+  if options.type in ('java_binary', 'java_library'):
+    # Only resources might have srcjars (normal srcjar targets are listed in
+    # srcjar_deps). A resource's srcjar contains the R.java file for those
+    # resources, and (like Android's default build system) we allow a library to
+    # refer to the resources in any of its dependents.
+    config['javac']['srcjars'] = [
+        c['srcjar'] for c in direct_resources_deps if 'srcjar' in c]
+
+    # Used to strip out R.class for android_prebuilt()s.
+    if options.type == 'java_library':
+      config['javac']['resource_packages'] = [
+          c['package_name'] for c in all_resources_deps if 'package_name' in c]
+
+  if options.type == 'android_apk':
+    # Apks will get their resources srcjar explicitly passed to the java step.
+    config['javac']['srcjars'] = []
+
+  if options.type == 'android_assets':
+    all_asset_sources = []
+    if options.asset_renaming_sources:
+      all_asset_sources.extend(
+          build_utils.ParseGypList(options.asset_renaming_sources))
+    if options.asset_sources:
+      all_asset_sources.extend(build_utils.ParseGypList(options.asset_sources))
+
+    deps_info['assets'] = {
+        'sources': all_asset_sources
+    }
+    if options.asset_renaming_destinations:
+      deps_info['assets']['outputs'] = (
+          build_utils.ParseGypList(options.asset_renaming_destinations))
+    if options.disable_asset_compression:
+      deps_info['assets']['disable_compression'] = True
+
+  if options.type == 'android_resources':
+    deps_info['resources_zip'] = options.resources_zip
+    if options.srcjar:
+      deps_info['srcjar'] = options.srcjar
+    if options.android_manifest:
+      manifest = AndroidManifest(options.android_manifest)
+      deps_info['package_name'] = manifest.GetPackageName()
+    if options.package_name:
+      # Explicit --package-name overrides the manifest's package.
+      deps_info['package_name'] = options.package_name
+    if options.r_text:
+      deps_info['r_text'] = options.r_text
+    if options.is_locale_resource:
+      deps_info['is_locale_resource'] = True
+
+  if options.type in ('android_resources','android_apk', 'resource_rewriter'):
+    config['resources'] = {}
+    config['resources']['dependency_zips'] = [
+        c['resources_zip'] for c in all_resources_deps]
+    config['resources']['extra_package_names'] = []
+    config['resources']['extra_r_text_files'] = []
+
+  if options.type == 'android_apk' or options.type == 'resource_rewriter':
+    config['resources']['extra_package_names'] = [
+        c['package_name'] for c in all_resources_deps if 'package_name' in c]
+    config['resources']['extra_r_text_files'] = [
+        c['r_text'] for c in all_resources_deps if 'r_text' in c]
+
+  if options.type in ['android_apk', 'deps_dex']:
+    deps_dex_files = [c['dex_path'] for c in all_library_deps]
+
+  proguard_enabled = options.proguard_enabled
+  if options.type == 'android_apk':
+    deps_info['proguard_enabled'] = proguard_enabled
+
+  if proguard_enabled:
+    deps_info['proguard_info'] = options.proguard_info
+    config['proguard'] = {}
+    proguard_config = config['proguard']
+    proguard_config['input_paths'] = [options.jar_path] + java_full_classpath
+
+  # An instrumentation test apk should exclude the dex files that are in the apk
+  # under test.
+  if options.type == 'android_apk' and options.tested_apk_config:
+    tested_apk_library_deps = tested_apk_deps.All('java_library')
+    tested_apk_deps_dex_files = [c['dex_path'] for c in tested_apk_library_deps]
+    # Include in the classpath classes that are added directly to the apk under
+    # test (those that are not a part of a java_library).
+    tested_apk_config = GetDepConfig(options.tested_apk_config)
+    javac_classpath.append(tested_apk_config['jar_path'])
+    # Exclude dex files from the test apk that exist within the apk under test.
+    deps_dex_files = [
+        p for p in deps_dex_files if not p in tested_apk_deps_dex_files]
+
+    expected_tested_package = tested_apk_config['package_name']
+    AndroidManifest(options.android_manifest).CheckInstrumentation(
+        expected_tested_package)
+    if tested_apk_config['proguard_enabled']:
+      assert proguard_enabled, ('proguard must be enabled for instrumentation'
+          ' apks if it\'s enabled for the tested apk')
+
+  # Dependencies for the final dex file of an apk or a 'deps_dex'.
+  if options.type in ['android_apk', 'deps_dex']:
+    config['final_dex'] = {}
+    dex_config = config['final_dex']
+    dex_config['dependency_dex_files'] = deps_dex_files
+
+  if options.type in ('java_binary', 'java_library', 'android_apk'):
+    config['javac']['classpath'] = javac_classpath
+    config['javac']['interface_classpath'] = [
+        _AsInterfaceJar(p) for p in javac_classpath]
+    config['java'] = {
+      'full_classpath': java_full_classpath
+    }
+
+  if options.type == 'android_apk':
+    dependency_jars = [c['jar_path'] for c in all_library_deps]
+    all_interface_jars = [
+        _AsInterfaceJar(p) for p in dependency_jars + [options.jar_path]]
+    config['dist_jar'] = {
+      'dependency_jars': dependency_jars,
+      'all_interface_jars': all_interface_jars,
+    }
+    manifest = AndroidManifest(options.android_manifest)
+    deps_info['package_name'] = manifest.GetPackageName()
+    if not options.tested_apk_config and manifest.GetInstrumentation():
+      # This must then have instrumentation only for itself.
+      manifest.CheckInstrumentation(manifest.GetPackageName())
+
+    library_paths = []
+    # Holder list so the nested function below can write the result back
+    # (Python 2 has no 'nonlocal').
+    java_libraries_list_holder = [None]
+    libraries = build_utils.ParseGypList(options.native_libs or '[]')
+    if libraries:
+      def recompute_ordered_libraries():
+        libraries_dir = os.path.dirname(libraries[0])
+        write_ordered_libraries.SetReadelfPath(options.readelf_path)
+        write_ordered_libraries.SetLibraryDirs([libraries_dir])
+        all_deps = (
+            write_ordered_libraries.GetSortedTransitiveDependenciesForBinaries(
+                libraries))
+        # Create a java literal array with the "base" library names:
+        # e.g. libfoo.so -> foo
+        java_libraries_list_holder[0] = ('{%s}' % ','.join(
+            ['"%s"' % s[3:-3] for s in all_deps]))
+        library_paths.extend(
+            write_ordered_libraries.FullLibraryPath(x) for x in all_deps)
+
+      # This step takes about 600ms on a z620 for chrome_apk, so it's worth
+      # caching.
+      md5_check.CallAndRecordIfStale(
+          recompute_ordered_libraries,
+          record_path=options.build_config + '.nativelibs.md5.stamp',
+          input_paths=libraries,
+          output_paths=[options.build_config])
+      if not library_paths:
+        # Cache hit: recompute_ordered_libraries did not run, so recover the
+        # values from the previously written build_config.
+        prev_config = build_utils.ReadJson(options.build_config)
+        java_libraries_list_holder[0] = (
+            prev_config['native']['java_libraries_list'])
+        library_paths.extend(prev_config['native']['libraries'])
+
+    all_inputs.extend(library_paths)
+    config['native'] = {
+      'libraries': library_paths,
+      'java_libraries_list': java_libraries_list_holder[0],
+    }
+    config['assets'], config['uncompressed_assets'] = (
+        _MergeAssets(deps.All('android_assets')))
+
+  build_utils.WriteJson(config, options.build_config, only_if_changed=True)
+
+  if options.depfile:
+    build_utils.WriteDepfile(options.depfile, all_inputs)
+
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv[1:]))
diff --git a/build/android/gyp/write_ordered_libraries.py b/build/android/gyp/write_ordered_libraries.py
new file mode 100755
index 0000000..0fc9a8c
--- /dev/null
+++ b/build/android/gyp/write_ordered_libraries.py
@@ -0,0 +1,144 @@
+#!/usr/bin/env python
+#
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Writes dependency ordered list of native libraries.
+
+The list excludes any Android system libraries, as those are not bundled with
+the APK.
+
+This list of libraries is used for several steps of building an APK.
+In the component build, the --input-libraries only needs to be the top-level
+library (i.e. libcontent_shell_content_view). This will then use readelf to
+inspect the shared libraries and determine the full list of (non-system)
+libraries that should be included in the APK.
+"""
+
+# TODO(cjhopman): See if we can expose the list of library dependencies from
+# gyp, rather than calculating it ourselves.
+# http://crbug.com/225558
+
+import optparse
+import os
+import re
+import sys
+
+from util import build_utils
+
+_readelf = None
+_library_dirs = None
+
+_library_re = re.compile(
+    '.*NEEDED.*Shared library: \[(?P<library_name>.+)\]')
+
+
+def SetReadelfPath(path):
+  """Sets the readelf binary used by CallReadElf()."""
+  global _readelf
+  _readelf = path
+
+
+def SetLibraryDirs(dirs):
+  """Sets the directories searched by FullLibraryPath()."""
+  global _library_dirs
+  _library_dirs = dirs
+
+
+def FullLibraryPath(library_name):
+  """Returns the first existing path to |library_name| in _library_dirs.
+
+  Falls back to returning |library_name| unchanged if not found.
+  """
+  assert _library_dirs is not None
+  for directory in _library_dirs:
+    path = '%s/%s' % (directory, library_name)
+    if os.path.exists(path):
+      return path
+  return library_name
+
+
+def IsSystemLibrary(library_name):
+  """Returns True for libraries not found in the configured library dirs."""
+  # If the library doesn't exist in the libraries directory, assume that it is
+  # an Android system library.
+  return not os.path.exists(FullLibraryPath(library_name))
+
+
+def CallReadElf(library_or_executable):
+  """Returns the output of `readelf -d` for the given binary."""
+  assert _readelf is not None
+  readelf_cmd = [_readelf,
+                 '-d',
+                 FullLibraryPath(library_or_executable)]
+  return build_utils.CheckOutput(readelf_cmd)
+
+
+def GetDependencies(library_or_executable):
+  """Returns the set of NEEDED (shared library) names for the given binary."""
+  elf = CallReadElf(library_or_executable)
+  return set(_library_re.findall(elf))
+
+
+def GetNonSystemDependencies(library_name):
+  """Returns direct dependencies of |library_name|, minus system libraries."""
+  all_deps = GetDependencies(library_name)
+  return set((lib for lib in all_deps if not IsSystemLibrary(lib)))
+
+
+def GetSortedTransitiveDependencies(libraries):
+  """Returns all transitive library dependencies in dependency order."""
+  # The per-library dependency sets come from readelf via
+  # GetNonSystemDependencies; build_utils does the topological sort.
+  return build_utils.GetSortedTransitiveDependencies(
+      libraries, GetNonSystemDependencies)
+
+
+def GetSortedTransitiveDependenciesForBinaries(binaries):
+  """Returns sorted transitive deps for a list of .so files, or for the
+  (single) executable's non-system dependencies otherwise.
+  """
+  if binaries[0].endswith('.so'):
+    libraries = [os.path.basename(lib) for lib in binaries]
+  else:
+    # Only one executable is supported; seed the walk with its direct deps.
+    assert len(binaries) == 1
+    all_deps = GetDependencies(binaries[0])
+    libraries = [lib for lib in all_deps if not IsSystemLibrary(lib)]
+
+  return GetSortedTransitiveDependencies(libraries)
+
+
+def main():
+  """Writes the dependency-ordered native library list as a json file."""
+  parser = optparse.OptionParser()
+  build_utils.AddDepfileOption(parser)
+
+  parser.add_option('--input-libraries',
+      help='A list of top-level input libraries.')
+  parser.add_option('--libraries-dir',
+      help='The directory which contains shared libraries.')
+  parser.add_option('--readelf', help='Path to the readelf binary.')
+  parser.add_option('--output', help='Path to the generated .json file.')
+  parser.add_option('--stamp', help='Path to touch on success.')
+
+  options, _ = parser.parse_args()
+
+  SetReadelfPath(options.readelf)
+  SetLibraryDirs(options.libraries_dir.split(','))
+
+  libraries = build_utils.ParseGypList(options.input_libraries)
+  if len(libraries):
+    libraries = GetSortedTransitiveDependenciesForBinaries(libraries)
+
+  # Convert to "base" library names: e.g. libfoo.so -> foo
+  java_libraries_list = (
+      '{%s}' % ','.join(['"%s"' % s[3:-3] for s in libraries]))
+
+  out_json = {
+      'libraries': libraries,
+      'lib_paths': [FullLibraryPath(l) for l in libraries],
+      'java_libraries_list': java_libraries_list
+      }
+  # only_if_changed avoids dirtying downstream steps when output is identical.
+  build_utils.WriteJson(
+      out_json,
+      options.output,
+      only_if_changed=True)
+
+  if options.stamp:
+    build_utils.Touch(options.stamp)
+
+  if options.depfile:
+    build_utils.WriteDepfile(
+        options.depfile,
+        libraries + build_utils.GetPythonDependencies())
+
+
+if __name__ == '__main__':
+  sys.exit(main())
+
+
diff --git a/build/android/gyp/zip.py b/build/android/gyp/zip.py
new file mode 100755
index 0000000..51322df
--- /dev/null
+++ b/build/android/gyp/zip.py
@@ -0,0 +1,26 @@
+#!/usr/bin/env python
+#
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Archives a set of files.
+"""
+
+import optparse
+import sys
+
+from util import build_utils
+
+def main():
+  """Zips every file found under --input-dir into the --output archive."""
+  parser = optparse.OptionParser()
+  parser.add_option('--input-dir', help='Directory of files to archive.')
+  parser.add_option('--output', help='Path to output archive.')
+  options, _ = parser.parse_args()
+
+  # Archive paths are made relative to --input-dir by DoZip's third argument.
+  inputs = build_utils.FindInDirectory(options.input_dir, '*')
+  build_utils.DoZip(inputs, options.output, options.input_dir)
+
+
+if __name__ == '__main__':
+  sys.exit(main())